Example #1
    def makePhaseQuadTable(self, inputs):
        """
        Generates a phase table using CalMuonDetectorPhases.
        """
        mantid.CloneWorkspace(InputWorkspace=inputs["InputWorkspace"],
                              OutputWorkspace="__tmp__")
        mantid.MaskDetectors(Workspace="__tmp__",
                             DetectorList=inputs['MaskedDetectors'],
                             StoreInADS=False)

        self.alg = mantid.AlgorithmManager.create("CalMuonDetectorPhases")
        self.alg.initialize()
        self.alg.setRethrows(True)

        self.alg.setProperty("FirstGoodData", inputs["FirstGoodData"])
        self.alg.setProperty("LastGoodData", inputs["LastGoodData"])

        self.alg.setProperty("InputWorkspace", "__tmp__")
        self.alg.setProperty("DetectorTable", "PhaseTable")
        self.alg.setProperty("DataFitted", "fits")

        self.alg.execute()
        mantid.DeleteWorkspace("__tmp__")
        mantid.DeleteWorkspace("fits")
        self.alg = None
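A note on the pattern above: creating an unmanaged algorithm through AlgorithmManager, with setRethrows(True), surfaces failures as Python exceptions instead of log messages. A minimal, self-contained sketch of the same pattern, assuming a working Mantid installation (Rebin stands in here for CalMuonDetectorPhases, which needs real muon data):

    from mantid.api import AlgorithmManager
    import mantid.simpleapi as sapi

    # A small workspace to operate on: one spectrum, three bins.
    ws = sapi.CreateWorkspace(DataX=[0, 1, 2, 3], DataY=[1, 2, 3], NSpec=1)

    alg = AlgorithmManager.create("Rebin")
    alg.initialize()
    alg.setRethrows(True)  # propagate errors as Python exceptions
    alg.setProperty("InputWorkspace", ws)
    alg.setProperty("OutputWorkspace", "rebinned")
    alg.setProperty("Params", "0,1.5,3")
    alg.execute()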
Example #2
    def _mask_component(self, workspace, component_name):
        '''
        Masks a component by name (e.g. "detector1" or "wing_detector").
        '''
        instrument = workspace.getInstrument()
        try:
            component = instrument.getComponentByName(component_name)
        except Exception:
            Logger("SANSMask").error(
                "Component not valid! %s" % component_name)
            return

        masked_detectors = []
        if component.type() == 'RectangularDetector':
            id_min = component.minDetectorID()
            id_max = component.maxDetectorID()
            masked_detectors = list(range(id_min, id_max + 1))
        elif component.type() in ('CompAssembly', 'ObjCompAssembly',
                                  'DetectorComponent'):
            ids_gen = self.__get_ids_for_assembly(component)
            masked_detectors = list(ids_gen)
        else:
            Logger("SANSMask").error(
                "Mask not applied. Component not valid: %s of type %s." %
                (component.getName(), component.type()))

        api.MaskDetectors(Workspace=workspace, DetectorList=masked_detectors)
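For a RectangularDetector the mask list is just the component's contiguous detector-ID range; for assemblies the IDs must be walked recursively. A hedged sketch of the ID-range case (the bounds here are hypothetical):

    # Hypothetical ID range of a rectangular detector component.
    id_min, id_max = 100, 115
    masked_detectors = list(range(id_min, id_max + 1))
    # api.MaskDetectors(Workspace=workspace, DetectorList=masked_detectors)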
Example #3
    def PyExec(self):
        workspace = self.getProperty("Workspace").value
        facility = self.getProperty("Facility").value

        # Apply saved mask as needed
        self._apply_saved_mask(workspace, facility)

        # Mask a detector side
        self._mask_detector_side(workspace, facility)

        # Mask edges
        edges = self.getProperty("MaskedEdges").value
        if len(edges) == 4:
            if facility.upper() == "HFIR":
                masked_pixels = hfir_instrument.get_masked_pixels(
                    edges[0], edges[1], edges[2], edges[3], workspace)
            else:
                masked_pixels = sns_instrument.get_masked_pixels(
                    edges[0], edges[1], edges[2], edges[3], workspace)
            self._mask_pixels(masked_pixels, workspace, facility)

        # Mask a list of detectors
        masked_dets = self.getProperty("MaskedDetectorList").value
        if len(masked_dets) > 0:
            api.MaskDetectors(Workspace=workspace, DetectorList=masked_dets)

        self.setProperty("OutputMessage", "Mask applied")
Example #4
    def _calibData(self, sam_ws, mon_ws):
        sapi.MaskDetectors(Workspace=sam_ws,
                           DetectorList=self._dMask)
        sapi.ModeratorTzeroLinear(InputWorkspace=sam_ws,
                                  OutputWorkspace=sam_ws)
        sapi.LoadParameterFile(Workspace=sam_ws,
                               Filename=pjoin(DEFAULT_CONFIG_DIR,
                                              self._reflection["parameter_file"]))
        sapi.ConvertUnits(InputWorkspace=sam_ws,
                          OutputWorkspace=sam_ws,
                          Target='Wavelength',
                          EMode='Indirect')

        if self._MonNorm:
            sapi.ModeratorTzeroLinear(InputWorkspace=mon_ws,
                                      OutputWorkspace=mon_ws)
            sapi.Rebin(InputWorkspace=mon_ws,
                       OutputWorkspace=mon_ws,
                       Params='10')
            sapi.ConvertUnits(InputWorkspace=mon_ws,
                              OutputWorkspace=mon_ws,
                              Target='Wavelength')
            sapi.OneMinusExponentialCor(InputWorkspace=mon_ws,
                                        OutputWorkspace=mon_ws,
                                        C='0.20749999999999999',
                                        C1='0.001276')
            sapi.Scale(InputWorkspace=mon_ws,
                       OutputWorkspace=mon_ws,
                       Factor='1e-06')
            sapi.RebinToWorkspace(WorkspaceToRebin=sam_ws,
                                  WorkspaceToMatch=mon_ws,
                                  OutputWorkspace=sam_ws)
            sapi.Divide(LHSWorkspace=sam_ws,
                        RHSWorkspace=mon_ws,
                        OutputWorkspace=sam_ws)
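The monitor-normalization tail of this routine follows a common Mantid idiom: rebin the sample onto the monitor binning, then divide bin-by-bin. A minimal sketch with toy workspaces, assuming a working Mantid installation:

    import mantid.simpleapi as sapi

    # Toy stand-ins for sample and monitor spectra.
    sam = sapi.CreateWorkspace(DataX=[0, 1, 2, 3, 4], DataY=[4, 4, 4, 4], NSpec=1)
    mon = sapi.CreateWorkspace(DataX=[0, 2, 4], DataY=[2, 2], NSpec=1)

    # Match binning, then divide, mirroring RebinToWorkspace + Divide above.
    sam = sapi.RebinToWorkspace(WorkspaceToRebin=sam, WorkspaceToMatch=mon)
    ratio = sapi.Divide(LHSWorkspace=sam, RHSWorkspace=mon)
    print(ratio.readY(0))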
Example #5
    def PhaseQuad(self, inputs):
        """
        Runs the PhaseQuad algorithm, which groups the data into a single set.
        """
        cloned_workspace = mantid.CloneWorkspace(
            InputWorkspace=inputs["InputWorkspace"], StoreInADS=False)
        mantid.MaskDetectors(Workspace=cloned_workspace,
                             DetectorList=inputs['MaskedDetectors'],
                             StoreInADS=False)
        mantid.CropWorkspace(InputWorkspace=cloned_workspace,
                             XMin=inputs['FirstGoodData'],
                             XMax=inputs['LastGoodData'],
                             OutputWorkspace='cropped_workspace_pre_phasequad')

        self.alg = mantid.AlgorithmManager.create("PhaseQuad")
        self.alg.initialize()
        self.alg.setRethrows(True)

        self.alg.setProperty("InputWorkspace",
                             'cropped_workspace_pre_phasequad')
        self.alg.setProperty("PhaseTable", "PhaseTable")
        self.alg.setProperty("OutputWorkspace", "__phaseQuad__")
        self.alg.execute()
        mantid.DeleteWorkspace("cropped_workspace_pre_phasequad")
        self.alg = None
Example #6
    def _calibData(self, sam_ws, mon_ws):
        api.MaskDetectors(Workspace=sam_ws, DetectorList=self._dMask)
        api.ModeratorTzeroLinear(InputWorkspace=sam_ws,
                                 OutputWorkspace=sam_ws)
        api.LoadParameterFile(Workspace=sam_ws,
                              Filename=config.getInstrumentDirectory() +
                              'BASIS_silicon_111_Parameters.xml')
        api.ConvertUnits(InputWorkspace=sam_ws,
                         OutputWorkspace=sam_ws,
                         Target='Wavelength',
                         EMode='Indirect')

        if not self._noMonNorm:
            api.ModeratorTzeroLinear(InputWorkspace=mon_ws,
                                     OutputWorkspace=mon_ws)
            api.Rebin(InputWorkspace=mon_ws,
                      OutputWorkspace=mon_ws,
                      Params='10')
            api.ConvertUnits(InputWorkspace=mon_ws,
                             OutputWorkspace=mon_ws,
                             Target='Wavelength')
            api.OneMinusExponentialCor(InputWorkspace=mon_ws,
                                       OutputWorkspace=mon_ws,
                                       C='0.20749999999999999',
                                       C1='0.001276')
            api.Scale(InputWorkspace=mon_ws,
                      OutputWorkspace=mon_ws,
                      Factor='1e-06')
            api.RebinToWorkspace(WorkspaceToRebin=sam_ws,
                                 WorkspaceToMatch=mon_ws,
                                 OutputWorkspace=sam_ws)
            api.Divide(LHSWorkspace=sam_ws,
                       RHSWorkspace=mon_ws,
                       OutputWorkspace=sam_ws)
Example #7
    def makePhaseQuadTable(self, inputs):
        """
        Generates a phase table using CalMuonDetectorPhases.
        """
        cloned_workspace_CalMuon = mantid.CloneWorkspace(
            InputWorkspace=inputs["InputWorkspace"], StoreInADS=False)
        mantid.MaskDetectors(Workspace=cloned_workspace_CalMuon,
                             DetectorList=inputs['MaskedDetectors'],
                             StoreInADS=False)

        self.alg = mantid.AlgorithmManager.create("CalMuonDetectorPhases")
        self.alg.initialize()
        self.alg.setAlwaysStoreInADS(False)
        self.alg.setRethrows(True)

        self.alg.setProperty("FirstGoodData", inputs["FirstGoodData"])
        self.alg.setProperty("LastGoodData", inputs["LastGoodData"])

        self.alg.setProperty("InputWorkspace", cloned_workspace_CalMuon)
        self.alg.setProperty("DetectorTable", "PhaseTable")
        self.alg.setProperty("DataFitted", "fits")

        self.alg.execute()
        mantid.AnalysisDataService.addOrReplace(
            "PhaseTable",
            self.alg.getProperty("DetectorTable").value)
        self.alg = None
Example #8
 def test_function_call_executes_when_algorithm_has_only_inout_workspace_props(
         self):
     data = [1.0, 2.0, 3.0, 4.0, 5.0, 6.0]
     wavelength = simpleapi.CreateWorkspace(data,
                                            data,
                                            NSpec=3,
                                            UnitX='Wavelength')
     simpleapi.MaskDetectors(wavelength, WorkspaceIndexList=[1, 2])
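A mask set this way can be verified through the workspace's spectrumInfo. A hedged sketch on a workspace with a real instrument (MARI is just an example definition shipped with standard Mantid installations; index-to-detector details vary by instrument):

    import mantid.simpleapi as simpleapi

    ws = simpleapi.LoadEmptyInstrument(InstrumentName='MARI')
    simpleapi.MaskDetectors(ws, WorkspaceIndexList=[1, 2])
    info = ws.spectrumInfo()
    print(info.isMasked(1), info.isMasked(2))  # expected: True True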
Example #9
def calculate_van_absorb_corrections(ws_to_correct, multiple_scattering, is_vanadium):
    mantid.MaskDetectors(ws_to_correct, SpectraList=list(range(1, 55)))

    absorb_dict = polaris_advanced_config.absorption_correction_params
    sample_details_obj = absorb_corrections.create_vanadium_sample_details_obj(config_dict=absorb_dict)
    ws_to_correct = absorb_corrections.run_cylinder_absorb_corrections(
        ws_to_correct=ws_to_correct, multiple_scattering=multiple_scattering, sample_details_obj=sample_details_obj,
        is_vanadium=is_vanadium)
    return ws_to_correct
Example #10
def calculate_van_absorb_corrections(ws_to_correct, multiple_scattering, is_vanadium):
    # First 100 detectors are monitors or not connected to DAE
    mantid.MaskDetectors(ws_to_correct, SpectraList=range(1, 101))

    absorb_dict = gem_advanced_config.absorption_correction_params
    sample_details_obj = absorb_corrections.create_vanadium_sample_details_obj(config_dict=absorb_dict)
    ws_to_correct = absorb_corrections.run_cylinder_absorb_corrections(
        ws_to_correct=ws_to_correct, multiple_scattering=multiple_scattering, sample_details_obj=sample_details_obj,
        is_vanadium=is_vanadium)
    return ws_to_correct
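Note the difference between the two preceding variants: POLARIS passes list(range(1, 55)) while GEM passes a bare range(1, 101). The GEM code suggests Mantid's property system accepts either sequence, but an explicit list of ints is the safer habit under Python 3; a trivial sketch:

    spectra = range(1, 101)       # lazy range object in Python 3
    spectra_list = list(spectra)  # explicit list of ints
    # mantid.MaskDetectors(ws_to_correct, SpectraList=spectra_list)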
Example #11
    def PyExec(self):
        workspace = self.getProperty("Workspace").value
        facility = self.getProperty("Facility").value
        component_name = self.getProperty("MaskedComponent").value

        # Apply saved mask as needed
        self._apply_saved_mask(workspace, facility)

        # Mask a detector side
        self._mask_detector_side(workspace, facility, component_name)

        # Mask edges
        edges = self.getProperty("MaskedEdges").value
        if len(edges) == 4:
            if facility.upper() == "HFIR":
                masked_ids = hfir_instrument.get_masked_ids(
                    edges[0], edges[1], edges[2], edges[3], workspace,
                    component_name)
                self._mask_ids(list(masked_ids), workspace)
            else:
                masked_pixels = sns_instrument.get_masked_pixels(
                    edges[0], edges[1], edges[2], edges[3], workspace)
                self._mask_pixels(masked_pixels, workspace, facility)

        # Mask a list of detectors
        masked_dets = self.getProperty("MaskedDetectorList").value
        if len(masked_dets) > 0:
            api.MaskDetectors(Workspace=workspace, DetectorList=masked_dets)

        # Mask component: Note that edges cannot be masked
        component_name = self.getProperty("MaskedFullComponent").value
        if component_name:
            Logger("SANSMask").debug("Masking FULL component named %s." %
                                     component_name)
            self._mask_component(workspace, component_name)

        masked_ws = self.getPropertyValue("MaskedWorkspace")
        if masked_ws:
            api.MaskDetectors(Workspace=workspace, MaskedWorkspace=masked_ws)

        self.setProperty("OutputMessage", "Mask applied")
Example #12
    def runTest(self):
        white = 'MAP17186.raw'
        sample = 'MAP17269.raw'

        # Libisis values to check against
        tiny = 1e-10
        huge = 1e10

        v_out_lo = 0.01
        v_out_hi = 100.

        vv_lo = 0.1
        vv_hi = 2.0
        vv_sig = 0.0

        sv_sig = 3.3
        sv_hi = 1.5
        sv_lo = 0.0
        s_zero = True

        reducer = reduction.setup_reducer('MAPS')
        # parameters which explicitly affect diagnostics
        #
        reducer.prop_man.wb_integr_range = [20, 300]
        reducer.prop_man.bkgd_range = [12000, 18000]
        diag_mask = reducer.diagnose(white,
                                     sample,
                                     tiny=tiny,
                                     huge=huge,
                                     van_out_lo=v_out_lo,
                                     van_out_hi=v_out_hi,
                                     van_lo=vv_lo,
                                     van_hi=vv_hi,
                                     van_sig=vv_sig,
                                     samp_lo=sv_lo,
                                     samp_hi=sv_hi,
                                     samp_sig=sv_sig,
                                     samp_zero=s_zero,
                                     hard_mask_file=None)

        sample = reducer.get_run_descriptor(sample)
        sample_ws = sample.get_workspace()
        ms.MaskDetectors(Workspace=sample_ws, MaskedWorkspace=diag_mask)

        # Save the masked spectrum numbers to a simple ASCII file for comparison
        self.saved_diag_file = os.path.join(ms.config['defaultsave.directory'],
                                            'CurrentDirectInelasticDiag.txt')
        with open(self.saved_diag_file, 'w') as handle:
            spectrumInfo = sample_ws.spectrumInfo()
            for index in range(sample_ws.getNumberHistograms()):
                if spectrumInfo.isMasked(index):
                    spec_no = sample_ws.getSpectrum(index).getSpectrumNo()
                    handle.write(str(spec_no) + '\n')
Example #13
 def _mask_pixels(self, pixel_list, workspace, facility):
     if len(pixel_list) > 0:
         # Transform the list of pixels into a list of Mantid detector IDs
         if facility.upper() == "HFIR":
             masked_detectors = hfir_instrument.get_detector_from_pixel(
                 pixel_list)
         else:
             masked_detectors = sns_instrument.get_detector_from_pixel(
                 pixel_list, workspace)
         # Mask the pixels by passing the list of IDs
         api.MaskDetectors(Workspace=workspace,
                           DetectorList=masked_detectors)
Example #14
 def _calibrate_data(self, run_set, sam_ws):
     sapi.MaskDetectors(Workspace=sam_ws, DetectorList=self._dMask)
     rpf = self._elucidate_reflection_parameter_file(sam_ws)
     sapi.LoadParameterFile(Workspace=sam_ws, Filename=rpf)
     sapi.ModeratorTzeroLinear(InputWorkspace=sam_ws,
                               OutputWorkspace=sam_ws)
     sapi.ConvertUnits(InputWorkspace=sam_ws,
                       OutputWorkspace=sam_ws,
                       Target='Wavelength', EMode='Indirect')
     if self._flux_normalization_type is not None:
         flux_ws = self._generate_flux_spectrum(run_set, sam_ws)
         sapi.RebinToWorkspace(WorkspaceToRebin=sam_ws,
                               WorkspaceToMatch=flux_ws,
                               OutputWorkspace=sam_ws)
         sapi.Divide(LHSWorkspace=sam_ws, RHSWorkspace=flux_ws,
                     OutputWorkspace=sam_ws)
Example #15
    def show_instrument(self, file_name=None, workspace=None, tab=-1, reload=False, mask=None, data_proxy=None):
        """
            Show instrument for the given data file.
            If both file_name and workspace are given, the file will be loaded in
            a workspace with the given name.

            @param file_name: Data file path
            @param workspace: Workspace to create
            @param tab: Tab to open the instrument window in
        """
        file_name = str(file_name)

        def _show_ws_instrument(ws):
            if not AnalysisDataService.doesExist(ws):
                return

            # Do nothing if the instrument view is already displayed
            #FIXME: this doesn't seem to work 100% yet
            if False and self._instrument_view is not None and \
                self._data_set_viewed == file_name \
                and self._instrument_view.isVisible():

                # If we want a reload, close the instrument window currently shown
                if reload:
                    self._instrument_view.close()
                else:
                    return True

            self._instrument_view = mantidplot.getInstrumentView(ws, tab)
            if self._instrument_view is not None:
                self._instrument_view.show()
                self._data_set_viewed = file_name
                return True

            return False

        # Sanity check
        if not IS_IN_MANTIDPLOT:
            return

        # Set up workspace name
        if workspace is None:
            workspace = '__'+os.path.basename(file_name)

        # See if the file is already loaded
        if not reload and _show_ws_instrument(workspace):
            return

        if data_proxy is None:
            data_proxy = self._data_proxy

        if data_proxy is not None:
            proxy = data_proxy(file_name, workspace)
            if proxy.data_ws is not None:
                if mask is not None:
                    api.MaskDetectors(Workspace=proxy.data_ws, DetectorList=mask)
                _show_ws_instrument(proxy.data_ws)
            else:
                if hasattr(proxy, 'errors'):
                    if isinstance(proxy.errors, list):
                        for e in proxy.errors:
                            print(e)
                    else:
                        print(proxy.errors)
                QtGui.QMessageBox.warning(self, "Data Error", "Mantid doesn't know how to load this file")
        else:
            QtGui.QMessageBox.warning(self, "Data Error", "Mantid doesn't know how to load this file")
Example #16
    def PyExec(self):
        self._runs = self.getProperty('RunNumbers').value
        self._vanfile = self.getProperty('Vanadium').value
        self._ecruns = self.getProperty('EmptyCanRunNumbers').value
        self._ebins = (self.getProperty('EnergyBins').value).tolist()
        self._qbins = (self.getProperty('MomentumTransferBins').value).tolist()
        self._snorm = self.getProperty('NormalizeSlices').value
        self._clean = self.getProperty('CleanWorkspaces').value
        wn_sqes = self.getPropertyValue("OutputWorkspace")

        # workspace names
        prefix = ''
        if self._clean:
            prefix = '__'
        # "wn" denotes workspace name
        wn_data = prefix + 'data'  # Accumulated data events
        wn_data_mon = prefix + 'data_monitors'  # Accumulated monitors for data
        wn_van = prefix + 'vanadium'  # White-beam vanadium
        wn_van_st = prefix + 'vanadium_S_theta'
        wn_reduced = prefix + 'reduced'  # data after DGSReduction
        wn_ste = prefix + 'S_theta_E'  # data after grouping by theta angle
        wn_sten = prefix + 'S_theta_E_normalized'
        wn_steni = prefix + 'S_theta_E_interp'
        wn_sqe = prefix + 'S_Q_E'
        wn_sqeb = prefix + 'S_Q_E_binned'
        wn_sqesn = prefix + wn_sqes + '_norm'
        # Empty can files
        wn_ec_data = prefix + 'ec_data'  # Accumulated empty can data
        wn_ec_data_mon = prefix + 'ec_data_monitors'  # Accumulated monitors for empty can
        wn_ec_reduced = prefix + 'ec_reduced'  # empty can data after DGSReduction
        wn_ec_ste = prefix + 'ec_S_theta_E'  # empty can data after grouping by theta angle

        # Save current configuration
        facility = config['default.facility']
        instrument = config['default.instrument']
        datasearch = config["datasearch.searcharchive"]
        # Allows searching for ARCS run numbers
        config['default.facility'] = 'SNS'
        config['default.instrument'] = 'ARCS'
        config["datasearch.searcharchive"] = "On"

        try:
            # Load the vanadium file, assumed to be preprocessed, meaning that
            # for every detector all events within a particular wide wavelength
            # range have been rebinned into a single histogram
            self._load(self._vanfile, wn_van)
            # Check for white-beam vanadium, true if the vertical chopper is absent (vChTrans==2)
            if sapi.mtd[wn_van].run().getProperty('vChTrans').value[0] != 2:
                raise ValueError("White-vanadium is required")

            # Load several event files into a single workspace. The nominal incident
            # energy should be the same to avoid differences in energy resolution
            self._load(self._runs, wn_data)

            # Load empty can event files, if present
            if self._ecruns:
                self._load(self._ecruns, wn_ec_data)

        finally:
            # Recover the default configuration
            config['default.facility'] = facility
            config['default.instrument'] = instrument
            config["datasearch.searcharchive"] = datasearch

        # Obtain incident energy as the mean of the nominal Ei values.
        # There is one nominal value for each run number.
        ws_data = sapi.mtd[wn_data]
        Ei_stats = ws_data.getRun()['EnergyRequest'].getStatistics()
        Ei = Ei_stats.mean
        Ei_std = Ei_stats.standard_deviation

        # Verify empty can runs were obtained at similar energy
        if self._ecruns:
            ws_ec_data = sapi.mtd[wn_ec_data]
            ec_Ei = ws_ec_data.getRun()['EnergyRequest'].getStatistics().mean
            if abs(Ei - ec_Ei) > Ei_std:
                raise RuntimeError(
                    'Empty can runs were obtained at a significantly' +
                    ' different incident energy than the sample runs')

        # Obtain energy range. If user did not supply a triad
        # [Estart, Ewidth, Eend] but only Ewidth, then estimate
        # Estart and Eend from the nominal energies
        if len(self._ebins) == 1:
            self._ebins.insert(0, -0.5 * Ei)  # prepend
            self._ebins.append(0.95 * Ei)  # append

        # Enforce that the elastic energy (E=0) lies in the middle of the
        # central bin with an appropriate small shift in the energy range
        Ei_min_reduced = self._ebins[0] / self._ebins[1]
        remainder = Ei_min_reduced - int(Ei_min_reduced)
        if remainder >= 0.0:
            erange_shift = self._ebins[1] * (0.5 - remainder)
        else:
            erange_shift = self._ebins[1] * (-0.5 - remainder)
        self._ebins[0] += erange_shift  # shift minimum energy
        self._ebins[-1] += erange_shift  # shift maximum energy

        # Convert to energy transfer. Normalize by proton charge.
        # The output workspace is S(detector-id,E)
        factor = 0.1  # use a finer energy bin than the one passed (self._ebins[1])
        Erange = '{0},{1},{2}'.format(self._ebins[0], factor * self._ebins[1],
                                      self._ebins[2])
        Ei_calc, T0 = sapi.GetEiT0atSNS(MonitorWorkspace=wn_data_mon,
                                        IncidentEnergyGuess=Ei)
        sapi.MaskDetectors(Workspace=wn_data,
                           MaskedWorkspace=wn_van)  # Use vanadium mask
        sapi.DgsReduction(SampleInputWorkspace=wn_data,
                          SampleInputMonitorWorkspace=wn_data_mon,
                          IncidentEnergyGuess=Ei_calc,
                          UseIncidentEnergyGuess=1,
                          TimeZeroGuess=T0,
                          EnergyTransferRange=Erange,
                          IncidentBeamNormalisation='ByCurrent',
                          OutputWorkspace=wn_reduced)

        if self._ecruns:
            sapi.MaskDetectors(Workspace=wn_ec_data, MaskedWorkspace=wn_van)
            sapi.DgsReduction(SampleInputWorkspace=wn_ec_data,
                              SampleInputMonitorWorkspace=wn_ec_data_mon,
                              IncidentEnergyGuess=Ei_calc,
                              UseIncidentEnergyGuess=1,
                              TimeZeroGuess=T0,
                              EnergyTransferRange=Erange,
                              IncidentBeamNormalisation='ByCurrent',
                              OutputWorkspace=wn_ec_reduced)

        # Obtain maximum and minimum |Q| values, as well as dQ if none passed
        if len(self._qbins) < 3:
            if not self._qbins:
                # insert dQ if empty qbins. The minimal momentum transfer
                # is the result of an event where the initial energy was
                # Ei and the final energy was Ei+dE.
                dE = self._ebins[1]
                self._qbins.append(
                    numpy.sqrt((Ei + dE) / ENERGY_TO_WAVEVECTOR) -
                    numpy.sqrt(Ei / ENERGY_TO_WAVEVECTOR))
            mins, maxs = sapi.ConvertToMDMinMaxLocal(wn_reduced,
                                                     Qdimensions='|Q|',
                                                     dEAnalysisMode='Direct')
            self._qbins.insert(0, mins[0])  # prepend minimum Q
            self._qbins.append(maxs[0])  # append maximum Q

        # Delete sample and empty can event workspaces to free memory.
        if self._clean:
            sapi.DeleteWorkspace(wn_data)
            if self._ecruns:
                sapi.DeleteWorkspace(wn_ec_data)

        # Convert to S(theta,E)
        ki = numpy.sqrt(Ei / ENERGY_TO_WAVEVECTOR)
        # If dE is the smallest energy transfer considered,
        # then dQ/ki is the smallest dtheta (in radians)
        dtheta = self._qbins[1] / ki * (180.0 / numpy.pi)
        # Use a finer dtheta than the nominal smallest value
        factor = 1. / 5  # a reasonable (heuristic) value
        dtheta *= factor
        # Fix: a very small dtheta (<0.15 degrees) prevents correct interpolation
        dtheta = max(0.15, dtheta)
        # Group detectors according to theta angle for the sample runs
        group_file_os_handle, group_file_name = mkstemp(suffix='.xml')
        group_file_handle = os.fdopen(group_file_os_handle, 'w')
        sapi.GenerateGroupingPowder(InputWorkspace=wn_reduced,
                                    AngleStep=dtheta,
                                    GroupingFilename=group_file_name)
        group_file_handle.close()
        sapi.GroupDetectors(InputWorkspace=wn_reduced,
                            MapFile=group_file_name,
                            OutputWorkspace=wn_ste)
        # Group detectors according to theta angle for the empty can run
        if self._ecruns:
            sapi.GroupDetectors(InputWorkspace=wn_ec_reduced,
                                MapFile=group_file_name,
                                OutputWorkspace=wn_ec_ste)
            # Subtract the empty can from the can+sample
            sapi.Minus(LHSWorkspace=wn_ste,
                       RHSWorkspace=wn_ec_ste,
                       OutputWorkspace=wn_ste)

        # Normalize by the vanadium intensity, but before that we need S(theta)
        # for the vanadium. Recall every detector has all energies into a single
        # bin, so we get S(theta) instead of S(theta,E)
        sapi.GroupDetectors(InputWorkspace=wn_van,
                            MapFile=group_file_name,
                            OutputWorkspace=wn_van_st)
        # Divide by vanadium. Make sure it is integrated in the energy domain
        sapi.Integration(wn_van_st, OutputWorkspace=wn_van_st)
        sapi.Divide(wn_ste, wn_van_st, OutputWorkspace=wn_sten)
        sapi.ClearMaskFlag(Workspace=wn_sten)

        # Temporary file generated by GenerateGroupingPowder to be removed
        os.remove(group_file_name)  # no need for this file
        os.remove(os.path.splitext(group_file_name)[0] + ".par")

        max_i_theta = 0.0
        min_i_theta = 0.0

        # Linear interpolation for those theta values with low intensity
        # First, find minimum theta index with a non-zero histogram
        ws_sten = sapi.mtd[wn_sten]

        for i_theta in range(ws_sten.getNumberHistograms()):
            if ws_sten.dataY(i_theta).any():
                min_i_theta = i_theta
                break
        # second, find maximum theta with a non-zero histogram
        for i_theta in range(ws_sten.getNumberHistograms() - 1, -1, -1):
            if ws_sten.dataY(i_theta).any():
                max_i_theta = i_theta
                break

        # Scan a range of theta angles and apply interpolation to those theta angles
        # with considerably low intensity (gaps)
        delta_theta = max_i_theta - min_i_theta
        gaps = self._findGaps(wn_sten, int(min_i_theta + 0.1 * delta_theta),
                              int(max_i_theta - 0.1 * delta_theta))
        sapi.CloneWorkspace(InputWorkspace=wn_sten, OutputWorkspace=wn_steni)
        for gap in gaps:
            self._interpolate(wn_steni, gap)  # interpolate this gap

        # Convert S(theta,E) to S(Q,E), then rebin in |Q| and E to MD workspace
        sapi.ConvertToMD(InputWorkspace=wn_steni,
                         QDimensions='|Q|',
                         dEAnalysisMode='Direct',
                         OutputWorkspace=wn_sqe)
        Qmin = self._qbins[0]
        Qmax = self._qbins[-1]
        dQ = self._qbins[1]
        Qrange = '|Q|,{0},{1},{2}'.format(Qmin, Qmax, int((Qmax - Qmin) / dQ))
        Ei_min = self._ebins[0]
        Ei_max = self._ebins[-1]
        dE = self._ebins[1]
        deltaErange = 'DeltaE,{0},{1},{2}'.format(Ei_min, Ei_max,
                                                  int((Ei_max - Ei_min) / dE))
        sapi.BinMD(InputWorkspace=wn_sqe,
                   AxisAligned=1,
                   AlignedDim0=Qrange,
                   AlignedDim1=deltaErange,
                   OutputWorkspace=wn_sqeb)

        # Slice the data by transforming to a Matrix2Dworkspace,
        # with deltaE along the vertical axis
        sapi.ConvertMDHistoToMatrixWorkspace(
            InputWorkspace=wn_sqeb,
            Normalization='NumEventsNormalization',
            OutputWorkspace=wn_sqes)

        # Ensure correct units
        sapi.mtd[wn_sqes].getAxis(0).setUnit("MomentumTransfer")
        sapi.mtd[wn_sqes].getAxis(1).setUnit("DeltaE")

        # Shift the energy axis, since the reported values should be the center
        # of the bins, instead of the minimum bin boundary
        ws_sqes = sapi.mtd[wn_sqes]
        Eaxis = ws_sqes.getAxis(1)
        e_shift = self._ebins[1] / 2.0
        for i in range(Eaxis.length()):
            Eaxis.setValue(i, Eaxis.getValue(i) + e_shift)

        # Normalize each slice, if requested
        if self._snorm:
            sapi.Integration(InputWorkspace=wn_sqes, OutputWorkspace=wn_sqesn)
            sapi.Divide(LHSWorkspace=wn_sqes,
                        RHSWorkspace=wn_sqesn,
                        OutputWorkspace=wn_sqes)

        # Clean up workspaces from intermediate steps
        if self._clean:
            for name in (wn_van, wn_reduced, wn_ste, wn_van_st, wn_sten,
                         wn_steni, wn_sqe, wn_sqeb, wn_sqesn,
                         'PreprocessedDetectorsWS'):
                if sapi.mtd.doesExist(name):
                    sapi.DeleteWorkspace(name)

        # Output some info as a Notice in the log
        ebins = ', '.join(['{0:.2f}'.format(x) for x in self._ebins])
        qbins = ', '.join(['{0:.2f}'.format(x) for x in self._qbins])
        tbins = '{0:.2f} {1:.2f} {2:.2f}'.format(min_i_theta * dtheta, dtheta,
                                                 max_i_theta * dtheta)
        message = '\n******  SOME OUTPUT INFORMATION ***' + \
                  '\nEnergy bins: ' + ebins + \
                  '\nQ bins: ' + qbins + \
                  '\nTheta bins: '+tbins
        self.log().notice(message)

        self.setProperty("OutputWorkspace", sapi.mtd[wn_sqes])
Example #17
    def PyExec(self):
        config['default.facility'] = 'SNS'
        config['default.instrument'] = self._long_inst
        self._reflection =\
            REFLECTIONS_DICT[self.getProperty('ReflectionType').value]
        self._doIndiv = self.getProperty('DoIndividual').value
        # micro-eV to milli-eV
        self._etBins = 1.E-03 * self.getProperty('EnergyBins').value
        self._qBins = self.getProperty('MomentumTransferBins').value
        self._qBins[0] -= self._qBins[1]/2.0  # leftmost bin boundary
        self._qBins[2] += self._qBins[1]/2.0  # rightmost bin boundary
        self._MonNorm = self.getProperty('MonitorNorm').value
        maskfile = self.getProperty('MaskFile').value
        self._maskFile = maskfile if maskfile else\
            pjoin(DEFAULT_MASK_GROUP_DIR, self._reflection['mask_file'])
        self._groupDetOpt = self.getProperty('GroupDetectors').value
        self._normalizeToFirst = self.getProperty('NormalizeToFirst').value
        self._doNorm = self.getProperty('DivideByVanadium').value

        # retrieve properties pertaining to saving to NXSPE file
        self._nsxpe_do = self.getProperty('SaveNXSPE').value
        if self._nsxpe_do:
            self._nxspe_psi_angle_log = self.getProperty('PsiAngleLog').value
            self._nxspe_offset = self.getProperty('PsiOffset').value

        datasearch = config["datasearch.searcharchive"]
        if datasearch != "On":
            config["datasearch.searcharchive"] = "On"

        # Apply default mask if not supplied by user
        self._overrideMask = bool(self._maskFile)
        if not self._overrideMask:
            config.appendDataSearchDir(DEFAULT_MASK_GROUP_DIR)
            self._maskFile = self._reflection["mask_file"]

        sapi.LoadMask(Instrument='BASIS',
                      OutputWorkspace='BASIS_MASK',
                      InputFile=self._maskFile)

        # Work around length issue
        _dMask = sapi.ExtractMask('BASIS_MASK')
        self._dMask = _dMask[1]
        sapi.DeleteWorkspace(_dMask[0])

        ############################
        ##  Process the Vanadium  ##
        ############################

        norm_runs = self.getProperty("NormRunNumbers").value
        if self._doNorm and bool(norm_runs):
            if ";" in norm_runs:
                raise SyntaxError("Normalization does not support run groups")
            self._normalizationType = self.getProperty("NormalizationType").value
            self.log().information("Divide by Vanadium with normalization" +
                                   self._normalizationType)

            # Following steps common to all types of Vanadium normalization

            # norm_runs encompasses a single set, thus _getRuns returns
            # a list of only one item
            norm_set = self._getRuns(norm_runs, doIndiv=False)[0]
            normWs = self._sum_and_calibrate(norm_set, extra_extension="_norm")

            normRange = self.getProperty("NormWavelengthRange").value
            bin_width = normRange[1] - normRange[0]
            # This rebin integrates counts onto a histogram of a single bin
            if self._normalizationType == "by detector ID":
                self._normRange = [normRange[0], bin_width, normRange[1]]
                sapi.Rebin(InputWorkspace=normWs,
                           OutputWorkspace=normWs,
                           Params=self._normRange)
                self._normWs = normWs
            # FindDetectorsOutsideLimits to be substituted by MedianDetectorTest
            sapi.FindDetectorsOutsideLimits(InputWorkspace=normWs,
                                            LowThreshold=1.0*bin_width,
                                            # no count events outside ranges
                                            RangeLower=normRange[0],
                                            RangeUpper=normRange[1],
                                            OutputWorkspace='BASIS_NORM_MASK')
            # additional reduction steps when normalizing by Q slice
            if self._normalizationType == "by Q slice":
                self._normWs = self._group_and_SofQW(normWs, self._etBins,
                                                     isSample=False)

        ##########################
        ##  Process the sample  ##
        ##########################
        self._run_list = self._getRuns(self.getProperty("RunNumbers").value,
                                       doIndiv=self._doIndiv)
        for run_set in self._run_list:
            self._samWs = self._sum_and_calibrate(run_set)
            self._samWsRun = str(run_set[0])
            # Divide by Vanadium detector ID, if pertinent
            if self._normalizationType == "by detector ID":
                # Mask detectors with insufficient Vanadium signal before dividing
                sapi.MaskDetectors(Workspace=self._samWs,
                                   MaskedWorkspace='BASIS_NORM_MASK')
                sapi.Divide(LHSWorkspace=self._samWs,
                            RHSWorkspace=self._normWs,
                            OutputWorkspace=self._samWs)
            # additional reduction steps
            self._samSqwWs = self._group_and_SofQW(self._samWs, self._etBins,
                                                   isSample=True)
            if not self._debugMode:
                sapi.DeleteWorkspace(self._samWs)  # delete events file
            # Divide by Vanadium Q slice, if pertinent
            if self._normalizationType == "by Q slice":
                sapi.Divide(LHSWorkspace=self._samSqwWs,
                            RHSWorkspace=self._normWs,
                            OutputWorkspace=self._samSqwWs)
            # Clear mask from reduced file. Needed for binary operations
            # involving this S(Q,w)
            sapi.ClearMaskFlag(Workspace=self._samSqwWs)
            # Scale so that elastic line has Y-values ~ 1
            if self._normalizeToFirst:
                self._ScaleY(self._samSqwWs)

            # Transform the vertical axis (Q) to point data
            # Q-values are in X-axis now
            sapi.Transpose(InputWorkspace=self._samSqwWs,
                           OutputWorkspace=self._samSqwWs)
            # from histo to point
            sapi.ConvertToPointData(InputWorkspace=self._samSqwWs,
                                    OutputWorkspace=self._samSqwWs)
            # Q-values back to vertical axis
            sapi.Transpose(InputWorkspace=self._samSqwWs,
                           OutputWorkspace=self._samSqwWs)
            self.serialize_in_log(self._samSqwWs)  # store the call
            # Output Dave and Nexus files
            extension = "_divided.dat" if self._doNorm else ".dat"
            dave_grp_filename = self._makeRunName(self._samWsRun, False) +\
                extension
            sapi.SaveDaveGrp(Filename=dave_grp_filename,
                             InputWorkspace=self._samSqwWs,
                             ToMicroEV=True)
            extension = "_divided_sqw.nxs" if self._doNorm else "_sqw.nxs"
            processed_filename = self._makeRunName(self._samWsRun, False) +\
                extension
            sapi.SaveNexus(Filename=processed_filename,
                           InputWorkspace=self._samSqwWs)

            # additional output
            if self.getProperty("OutputSusceptibility").value:
                temperature = mtd[self._samSqwWs].getRun().\
                    getProperty(TEMPERATURE_SENSOR).getStatistics().mean
                samXqsWs = self._samSqwWs.replace("sqw", "Xqw")
                sapi.ApplyDetailedBalance(InputWorkspace=self._samSqwWs,
                                          OutputWorkspace=samXqsWs,
                                          Temperature=str(temperature))
                sapi.ConvertUnits(InputWorkspace=samXqsWs,
                                  OutputWorkspace=samXqsWs,
                                  Target="DeltaE_inFrequency",
                                  Emode="Indirect")
                self.serialize_in_log(samXqsWs)
                susceptibility_filename = processed_filename.replace("sqw", "Xqw")
                sapi.SaveNexus(Filename=susceptibility_filename,
                               InputWorkspace=samXqsWs)

        if not self._debugMode:
            sapi.DeleteWorkspace("BASIS_MASK")  # delete the mask
            if self._doNorm and bool(norm_runs):
                sapi.DeleteWorkspace("BASIS_NORM_MASK")  # delete vanadium mask
                sapi.DeleteWorkspace(self._normWs)  # Delete vanadium S(Q)
                if self._normalizationType == "by Q slice":
                    sapi.DeleteWorkspace(normWs)  # Delete vanadium events file
            if self.getProperty("ExcludeTimeSegment").value:
                sapi.DeleteWorkspace('splitter')
                for name in ('splitted_unfiltered', 'TOFCorrectWS'):
                    if AnalysisDataService.doesExist(name):
                        sapi.DeleteWorkspace(name)
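The ExtractMask "length workaround" earlier in this routine indexes into the function's return tuple; with simpleapi's multiple return values the same thing reads more directly. A short sketch, assuming 'BASIS_MASK' already exists in the ADS:

    # ExtractMask returns the mask workspace plus the masked detector IDs.
    mask_ws, detector_ids = sapi.ExtractMask(InputWorkspace='BASIS_MASK')
    sapi.DeleteWorkspace(mask_ws)  # keep only the ID list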
Example #18
    def PyExec(self):
        config['default.facility'] = "SNS"
        config['default.instrument'] = self._long_inst
        self._reflection = REFLECTIONS_DICT[self.getProperty(
            "ReflectionType").value]
        self._doIndiv = self.getProperty("DoIndividual").value
        self._etBins = 1.E-03 * self.getProperty(
            "EnergyBins").value  # micro-eV to mili-eV
        self._qBins = self.getProperty("MomentumTransferBins").value
        self._qBins[0] -= self._qBins[1] / 2.0  # leftmost bin boundary
        self._qBins[2] += self._qBins[1] / 2.0  # rightmost bin boundary
        self._noMonNorm = self.getProperty("NoMonitorNorm").value
        self._maskFile = self.getProperty("MaskFile").value
        self._groupDetOpt = self.getProperty("GroupDetectors").value
        self._normalizeToFirst = self.getProperty("NormalizeToFirst").value
        self._doNorm = self.getProperty("DivideByVanadium").value

        datasearch = config["datasearch.searcharchive"]
        if datasearch != "On":
            config["datasearch.searcharchive"] = "On"

        # Apply default mask if not supplied by user
        self._overrideMask = bool(self._maskFile)
        if not self._overrideMask:
            config.appendDataSearchDir(DEFAULT_MASK_GROUP_DIR)
            self._maskFile = self._reflection["mask_file"]

        sapi.LoadMask(Instrument='BASIS',
                      OutputWorkspace='BASIS_MASK',
                      InputFile=self._maskFile)

        # Work around length issue
        _dMask = sapi.ExtractMask('BASIS_MASK')
        self._dMask = _dMask[1]
        sapi.DeleteWorkspace(_dMask[0])

        ############################
        ##  Process the Vanadium  ##
        ############################

        norm_runs = self.getProperty("NormRunNumbers").value
        if self._doNorm and bool(norm_runs):
            if ";" in norm_runs:
                raise SyntaxError("Normalization does not support run groups")
            self._normalizationType = self.getProperty(
                "NormalizationType").value
            self.log().information("Divide by Vanadium with normalization" +
                                   self._normalizationType)

            # The following steps are common to all types of Vanadium normalization

            # norm_runs encompasses a single set, thus _getRuns returns
            # a list of only one item
            norm_set = self._getRuns(norm_runs, doIndiv=False)[0]
            normWs = self._sum_and_calibrate(norm_set, extra_extension="_norm")

            # This rebin integrates counts onto a histogram of a single bin
            if self._normalizationType == "by detectorID":
                normRange = self.getProperty("NormWavelengthRange").value
                self._normRange = [
                    normRange[0], normRange[1] - normRange[0], normRange[1]
                ]
                sapi.Rebin(InputWorkspace=normWs,
                           OutputWorkspace=normWs,
                           Params=self._normRange)

            # FindDetectorsOutsideLimits to be substituted by MedianDetectorTest
            sapi.FindDetectorsOutsideLimits(InputWorkspace=normWs,
                                            OutputWorkspace="BASIS_NORM_MASK")

            # additional reduction steps when normalizing by Q slice
            if self._normalizationType == "by Q slice":
                self._normWs = self._group_and_SofQW(normWs,
                                                     self._etBins,
                                                     isSample=False)
            if not self._debugMode:
                sapi.DeleteWorkspace(normWs)  # Delete vanadium events file

        ##########################
        ##  Process the sample  ##
        ##########################
        self._run_list = self._getRuns(self.getProperty("RunNumbers").value,
                                       doIndiv=self._doIndiv)
        for run_set in self._run_list:
            self._samWs = self._sum_and_calibrate(run_set)
            self._samWsRun = str(run_set[0])
            # Divide by Vanadium detector ID, if pertinent
            if self._normalizationType == "by detector ID":
                # Mask detectors with insufficient Vanadium signal before dividing
                sapi.MaskDetectors(Workspace=self._samWs,
                                   MaskedWorkspace='BASIS_NORM_MASK')
                sapi.Divide(LHSWorkspace=self._samWs,
                            RHSWorkspace=self._normWs,
                            OutputWorkspace=self._samWs)
            # additional reduction steps
            self._samSqwWs = self._group_and_SofQW(self._samWs,
                                                   self._etBins,
                                                   isSample=True)
            if not self._debugMode:
                sapi.DeleteWorkspace(self._samWs)  # delete events file
            # Divide by Vanadium Q slice, if pertinent
            if self._normalizationType == "by Q slice":
                sapi.Divide(LHSWorkspace=self._samSqwWs,
                            RHSWorkspace=self._normWs,
                            OutputWorkspace=self._samSqwWs)
            # Clear mask from reduced file. Needed for binary operations
            # involving this S(Q,w)
            sapi.ClearMaskFlag(Workspace=self._samSqwWs)
            # Scale so that elastic line has Y-values ~ 1
            if self._normalizeToFirst:
                self._ScaleY(self._samSqwWs)
            # Transform the vertical axis to point data
            sapi.Transpose(
                InputWorkspace=self._samSqwWs,
                OutputWorkspace=self._samSqwWs)  # Q-values are in X-axis now
            sapi.ConvertToPointData(
                InputWorkspace=self._samSqwWs,
                OutputWorkspace=self._samSqwWs)  # from histo to point
            sapi.Transpose(InputWorkspace=self._samSqwWs,
                           OutputWorkspace=self._samSqwWs
                           )  # Q-values back to vertical axis
            # Output Dave and Nexus files
            extension = "_divided.dat" if self._doNorm else ".dat"
            dave_grp_filename = self._makeRunName(self._samWsRun,
                                                  False) + extension
            sapi.SaveDaveGrp(Filename=dave_grp_filename,
                             InputWorkspace=self._samSqwWs,
                             ToMicroEV=True)
            extension = "_divided_sqw.nxs" if self._doNorm else "_sqw.nxs"
            processed_filename = self._makeRunName(self._samWsRun,
                                                   False) + extension
            sapi.SaveNexus(Filename=processed_filename,
                           InputWorkspace=self._samSqwWs)

        if not self._debugMode:
            sapi.DeleteWorkspace("BASIS_MASK")  # delete the mask
            if self._doNorm and bool(norm_runs):
                sapi.DeleteWorkspace("BASIS_NORM_MASK")  # delete vanadium mask
                sapi.DeleteWorkspace(self._normWs)  # Delete vanadium S(Q)
Example #19
    def PyExec(self):
        config['default.facility'] = "SNS"
        config['default.instrument'] = self._long_inst
        self._doIndiv = self.getProperty("DoIndividual").value
        self._etBins = self.getProperty(
            "EnergyBins").value / MICROEV_TO_MILLIEV
        self._qBins = self.getProperty("MomentumTransferBins").value
        self._noMonNorm = self.getProperty("NoMonitorNorm").value
        self._maskFile = self.getProperty("MaskFile").value
        self._groupDetOpt = self.getProperty("GroupDetectors").value

        datasearch = config["datasearch.searcharchive"]
        if datasearch != "On":
            config["datasearch.searcharchive"] = "On"

        # Handle masking file override if necessary
        self._overrideMask = bool(self._maskFile)
        if not self._overrideMask:
            config.appendDataSearchDir(DEFAULT_MASK_GROUP_DIR)
            self._maskFile = DEFAULT_MASK_FILE

        api.LoadMask(Instrument='BASIS',
                     OutputWorkspace='BASIS_MASK',
                     InputFile=self._maskFile)

        # Work around length issue
        _dMask = api.ExtractMask('BASIS_MASK')
        self._dMask = _dMask[1]
        api.DeleteWorkspace(_dMask[0])

        # Do normalization if run numbers are present
        norm_runs = self.getProperty("NormRunNumbers").value
        self._doNorm = bool(norm_runs)
        self.log().information("Do Norm: " + str(self._doNorm))
        if self._doNorm:
            if ";" in norm_runs:
                raise SyntaxError("Normalization does not support run groups")
            # Set up the integration (rebin) parameters
            normRange = self.getProperty("NormWavelengthRange").value
            self._normRange = [
                normRange[0], normRange[1] - normRange[0], normRange[1]
            ]

            # Process normalization runs
            self._norm_run_list = self._getRuns(norm_runs)
            for norm_set in self._norm_run_list:
                extra_extension = "_norm"
                self._normWs = self._makeRunName(norm_set[0])
                self._normWs += extra_extension
                self._normMonWs = self._normWs + "_monitors"
                self._sumRuns(norm_set, self._normWs, self._normMonWs,
                              extra_extension)
                self._calibData(self._normWs, self._normMonWs)

            api.Rebin(InputWorkspace=self._normWs,
                      OutputWorkspace=self._normWs,
                      Params=self._normRange)
            api.FindDetectorsOutsideLimits(InputWorkspace=self._normWs,
                                           OutputWorkspace="BASIS_NORM_MASK")

        self._run_list = self._getRuns(self.getProperty("RunNumbers").value)
        for run_set in self._run_list:
            self._samWs = self._makeRunName(run_set[0])
            self._samMonWs = self._samWs + "_monitors"
            self._samWsRun = str(run_set[0])

            self._sumRuns(run_set, self._samWs, self._samMonWs)
            # After files are all added, run the reduction
            self._calibData(self._samWs, self._samMonWs)

            if self._doNorm:
                api.MaskDetectors(Workspace=self._samWs,
                                  MaskedWorkspace='BASIS_NORM_MASK')
                api.Divide(LHSWorkspace=self._samWs,
                           RHSWorkspace=self._normWs,
                           OutputWorkspace=self._samWs)

            api.ConvertUnits(InputWorkspace=self._samWs,
                             OutputWorkspace=self._samWs,
                             Target='DeltaE',
                             EMode='Indirect')
            api.CorrectKiKf(InputWorkspace=self._samWs,
                            OutputWorkspace=self._samWs,
                            EMode='Indirect')

            api.Rebin(InputWorkspace=self._samWs,
                      OutputWorkspace=self._samWs,
                      Params=self._etBins)
            if self._groupDetOpt != "None":
                if self._groupDetOpt == "Low-Resolution":
                    grp_file = "BASIS_Grouping_LR.xml"
                else:
                    grp_file = "BASIS_Grouping.xml"
                # If mask override used, we need to add default grouping file location to
                # search paths
                if self._overrideMask:
                    config.appendDataSearchDir(DEFAULT_MASK_GROUP_DIR)

                api.GroupDetectors(InputWorkspace=self._samWs,
                                   OutputWorkspace=self._samWs,
                                   MapFile=grp_file,
                                   Behaviour="Sum")

            self._samSqwWs = self._samWs + '_sqw'
            api.SofQW3(InputWorkspace=self._samWs,
                       OutputWorkspace=self._samSqwWs,
                       QAxisBinning=self._qBins,
                       EMode='Indirect',
                       EFixed=DEFAULT_ENERGY)

            dave_grp_filename = self._makeRunName(self._samWsRun,
                                                  False) + ".dat"
            api.SaveDaveGrp(Filename=dave_grp_filename,
                            InputWorkspace=self._samSqwWs,
                            ToMicroEV=True)
            processed_filename = self._makeRunName(self._samWsRun,
                                                   False) + "_sqw.nxs"
            api.SaveNexus(Filename=processed_filename,
                          InputWorkspace=self._samSqwWs)
Example #20
 def _mask_ids(self, id_list, workspace):
      # id_list may contain numpy integer types (e.g. numpy.int64), which
      # the DetectorList property does not accept; convert to native ints
      new_id_list = [int(i) for i in id_list]
     api.MaskDetectors(Workspace=workspace, DetectorList=new_id_list)
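The comment refers to numpy integer types (e.g. numpy.int64) coming back from instrument helpers; the DetectorList property wants native Python ints. A quick standalone demonstration of the conversion:

    import numpy as np

    ids = np.array([101, 102, 103], dtype=np.int64)
    clean_ids = [int(i) for i in ids]  # native ints, safe for MaskDetectors
    print(type(ids[0]).__name__, type(clean_ids[0]).__name__)  # int64 int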
Example #21
    def PyExec(self):
        config['default.facility'] = 'SNS'
        config['default.instrument'] = 'ARCS'
        self._runs = self.getProperty('RunNumbers').value
        self._vanfile = self.getProperty('Vanadium').value
        self._ecruns = self.getProperty('EmptyCanRunNumbers').value
        self._ebins_str = self.getProperty('EnergyBins').value
        self._qbins_str = self.getProperty('MomentumTransferBins').value
        self._snorm = self.getProperty('NormalizeSlices').value
        self._clean = self.getProperty('CleanWorkspaces').value
        wn_sqes = self.getPropertyValue("OutputWorkspace")

        # workspace names
        prefix = ''
        if self._clean:
            prefix = '__'
        # Sample files
        wn_data = prefix + 'data'
        wn_van = prefix + 'vanadium'
        wn_reduced = prefix + 'reduced'
        wn_ste = prefix + 'S_theta_E'
        wn_van_st = prefix + 'vanadium_S_theta'
        wn_sten = prefix + 'S_theta_E_normalized'
        wn_steni = prefix + 'S_theta_E_normalized_interp'
        wn_sqe = prefix + 'S_Q_E'
        wn_sqeb = prefix + 'S_Q_E_binned'
        wn_sqesn = prefix + wn_sqes + '_norm'
        # Empty can files
        wn_ec_data = prefix + 'ec_data'
        wn_ec_reduced = prefix + 'ec_reduced'
        wn_ec_ste = prefix + 'ec_S_theta_E'

        datasearch = config["datasearch.searcharchive"]
        if datasearch != "On":
            config["datasearch.searcharchive"] = "On"

        # Load several event files into a single workspace. The nominal incident
        # energy should be the same to avoid differences in energy resolution
        api.Load(Filename=self._runs, OutputWorkspace=wn_data)

        # Load the vanadium file, assumed to be preprocessed, meaning that
        # for every detector all events within a particular wide wavelength
        # range have been rebinned into a single histogram
        api.Load(Filename=self._vanfile, OutputWorkspace=wn_van)

        # Load empty can event files, if present
        if self._ecruns:
            api.Load(Filename=self._ecruns, OutputWorkspace=wn_ec_data)

        # Retrieve the mask from the vanadium workspace, and apply it to the data
        # (and empty can, if submitted)
        api.MaskDetectors(Workspace=wn_data, MaskedWorkspace=wn_van)
        if self._ecruns:
            api.MaskDetectors(Workspace=wn_ec_data, MaskedWorkspace=wn_van)

        # Obtain incident energy as the mean of the nominal Ei values.
        # There is one nominal value per event file.
        ws_data = api.mtd[wn_data]
        Ei = ws_data.getRun()['EnergyRequest'].getStatistics().mean
        Ei_std = \
            ws_data.getRun()['EnergyRequest'].getStatistics().standard_deviation

        # Verify empty can runs were obtained at similar energy
        if self._ecruns:
            ws_ec_data = api.mtd[wn_ec_data]
            ec_Ei = ws_ec_data.getRun()['EnergyRequest'].getStatistics().mean
            if abs(Ei - ec_Ei) > Ei_std:
                raise RuntimeError(
                    'Empty can runs were obtained at a significantly' +
                    ' different incident energy than the sample runs')

        # Obtain energy range
        self._ebins = [
            float(x)
            for x in re.compile(r'\d+[\.\d+]*').findall(self._ebins_str)
        ]
        if len(self._ebins) == 1:
            ws_data = api.mtd[wn_data]
            Ei = ws_data.getRun()['EnergyRequest'].getStatistics().mean
            self._ebins.insert(0, -0.5 * Ei)  # prepend
            self._ebins.append(0.95 * Ei)  # append

        # Enforce that the elastic energy (E=0) lies in the middle of the
        # central bin with an appropriate small shift in the energy range
        Ei_min_reduced = self._ebins[0] / self._ebins[1]
        remainder = Ei_min_reduced - int(Ei_min_reduced)
        if remainder >= 0.0:
            erange_shift = self._ebins[1] * (0.5 - remainder)
        else:
            erange_shift = self._ebins[1] * (-0.5 - remainder)
        self._ebins[0] += erange_shift  # shift minimum energy
        self._ebins[-1] += erange_shift  # shift maximum energy

        # Convert to energy transfer. Normalize by proton charge.
        # The output workspace is S(detector-id,E)
        factor = 0.1  # a fine energy bin
        Erange = '{0},{1},{2}'.format(self._ebins[0], factor * self._ebins[1],
                                      self._ebins[2])
        api.DgsReduction(SampleInputWorkspace=wn_data,
                         EnergyTransferRange=Erange,
                         IncidentBeamNormalisation='ByCurrent',
                         OutputWorkspace=wn_reduced)
        if self._ecruns:
            api.DgsReduction(SampleInputWorkspace=wn_ec_data,
                             EnergyTransferRange=Erange,
                             IncidentBeamNormalisation='ByCurrent',
                             OutputWorkspace=wn_ec_reduced)

        # Obtain maximum and minimum |Q| values, as well as dQ if none passed
        self._qbins = [
            float(x)
            for x in re.compile(r'\d+[\.\d+]*').findall(self._qbins_str)
        ]
        if len(self._qbins) < 3:
            if not self._qbins:
                # insert dQ if empty qbins
                dE = self._ebins[1]
                self._qbins.append(
                    numpy.sqrt((Ei + dE) / ENERGY_TO_WAVEVECTOR) -
                    numpy.sqrt(Ei / ENERGY_TO_WAVEVECTOR))
            mins, maxs = api.ConvertToMDMinMaxLocal(wn_reduced,
                                                    Qdimensions='|Q|',
                                                    dEAnalysisMode='Direct')
            self._qbins.insert(0, mins[0])  # prepend minimum Q
            self._qbins.append(maxs[0])  # append maximum Q

        # Clean up the events files. They take a lot of space in memory
        api.DeleteWorkspace(wn_data)
        if self._ecruns:
            api.DeleteWorkspace(wn_ec_data)

        # Convert to S(theta,E)
        ki = numpy.sqrt(Ei / ENERGY_TO_WAVEVECTOR)
        factor = 1. / 5  # a reasonable (heuristic) value
        # If dE is the smallest energy transfer considered,
        # then dQ/ki is the smallest dtheta (in radians)
        dtheta = factor * self._qbins[1] / ki * (180.0 / numpy.pi)
        # very small dtheta (<0.15 degrees) prevents interpolation
        dtheta = max(0.15, dtheta)
        group_file_os_handle, group_file_name = mkstemp(suffix='.xml')
        group_file_handle = os.fdopen(group_file_os_handle, 'w')
        api.GenerateGroupingPowder(InputWorkspace=wn_reduced,
                                   AngleStep=dtheta,
                                   GroupingFilename=group_file_name)
        group_file_handle.close()
        api.GroupDetectors(InputWorkspace=wn_reduced,
                           MapFile=group_file_name,
                           OutputWorkspace=wn_ste)
        if self._ecruns:
            api.GroupDetectors(InputWorkspace=wn_ec_reduced,
                               MapFile=group_file_name,
                               OutputWorkspace=wn_ec_ste)
            # Subtract the empty can from the can+sample
            api.Minus(LHSWorkspace=wn_ste,
                      RHSWorkspace=wn_ec_ste,
                      OutputWorkspace=wn_ste)

        # Normalize by the vanadium intensity, but before that we need S(theta)
        # for the vanadium. Recall every detector has all energies rebinned into
        # a single bin, so we get S(theta) instead of S(theta,E)
        api.GroupDetectors(InputWorkspace=wn_van,
                           MapFile=group_file_name,
                           OutputWorkspace=wn_van_st)
        os.remove(group_file_name)  # no need for this file
        api.Divide(wn_ste, wn_van_st, OutputWorkspace=wn_sten)
        api.ClearMaskFlag(Workspace=wn_sten)

        max_i_theta = 0.0
        min_i_theta = 0.0

        # Linear interpolation
        # First, find minimum theta index with a non-zero histogram
        ws_sten = api.mtd[wn_sten]
        for i_theta in range(ws_sten.getNumberHistograms()):
            if ws_sten.dataY(i_theta).any():
                min_i_theta = i_theta
                break
        # Second, find maximum theta index with a non-zero histogram
        for i_theta in range(ws_sten.getNumberHistograms() - 1, -1, -1):
            if ws_sten.dataY(i_theta).any():
                max_i_theta = i_theta
                break
        # Scan the region [min_i_theta, max_i_theta] and apply interpolation to
        # theta angles with no signal whatsoever, S(theta*, E)=0.0 for all energies
        api.CloneWorkspace(InputWorkspace=wn_sten, OutputWorkspace=wn_steni)
        ws_steni = api.mtd[wn_steni]
        i_theta = 1 + min_i_theta
        while i_theta < max_i_theta:
            if not ws_steni.dataY(i_theta).any():
                nonnull_i_theta_start = i_theta - 1  # angle index of non-null histogram
                # scan until we find a non-null histogram
                while not ws_steni.dataY(i_theta).any():
                    i_theta += 1
                nonnull_i_theta_end = i_theta  # angle index of non-null histogram
                # The indices from 1+nonnull_i_theta_start up to (but excluding)
                # nonnull_i_theta_end hold only null histograms. Interpolate!
                y_start = ws_steni.dataY(nonnull_i_theta_start)
                y_end = ws_steni.dataY(nonnull_i_theta_end)
                intercept = y_start
                slope = (y_end - y_start) / (nonnull_i_theta_end -
                                             nonnull_i_theta_start)
                for null_i_theta in range(1 + nonnull_i_theta_start,
                                          nonnull_i_theta_end):
                    ws_steni.dataY(null_i_theta)[:] = intercept + slope * (
                        null_i_theta - nonnull_i_theta_start)
            i_theta += 1

        # Convert S(theta,E) to S(Q,E), then rebin in |Q| and E to MD workspace
        api.ConvertToMD(InputWorkspace=wn_steni,
                        QDimensions='|Q|',
                        dEAnalysisMode='Direct',
                        OutputWorkspace=wn_sqe)
        Qmin = self._qbins[0]
        Qmax = self._qbins[-1]
        dQ = self._qbins[1]
        Qrange = '|Q|,{0},{1},{2}'.format(Qmin, Qmax, int((Qmax - Qmin) / dQ))
        Ei_min = self._ebins[0]
        Ei_max = self._ebins[-1]
        dE = self._ebins[1]
        deltaErange = 'DeltaE,{0},{1},{2}'.format(Ei_min, Ei_max,
                                                  int((Ei_max - Ei_min) / dE))
        api.BinMD(InputWorkspace=wn_sqe,
                  AxisAligned=1,
                  AlignedDim0=Qrange,
                  AlignedDim1=deltaErange,
                  OutputWorkspace=wn_sqeb)

        # Slice the data by transforming to a MatrixWorkspace, with deltaE along the vertical axis
        api.ConvertMDHistoToMatrixWorkspace(
            InputWorkspace=wn_sqeb,
            Normalization='NumEventsNormalization',
            OutputWorkspace=wn_sqes)

        # Shift the energy axis, since the reported values should be the center
        # of the bins, instead of the minimum bin boundary
        ws_sqes = api.mtd[wn_sqes]
        Eaxis = ws_sqes.getAxis(1)
        e_shift = self._ebins[1] / 2.0
        for i in range(Eaxis.length()):
            Eaxis.setValue(i, Eaxis.getValue(i) + e_shift)

        # Normalize each slice
        if self._snorm:
            api.Integration(InputWorkspace=wn_sqes, OutputWorkspace=wn_sqesn)
            api.Divide(LHSWorkspace=wn_sqes,
                       RHSWorkspace=wn_sqesn,
                       OutputWorkspace=wn_sqes)

        # Clean up workspaces from intermediate steps
        if self._clean:
            for name in (wn_van, wn_reduced, wn_ste, wn_van_st, wn_sten,
                         wn_steni, wn_sqe, wn_sqeb, wn_sqesn):
                api.DeleteWorkspace(name)
            if api.mtd.doesExist('PreprocessedDetectorsWS'):
                api.DeleteWorkspace('PreprocessedDetectorsWS')

        # Output some info
        message = '\n******  SOME OUTPUT INFORMATION ***' + \
                  '\nEnergy bins: ' + ', '.join(['{0:.2f}'.format(x) for x in self._ebins]) + \
                  '\nQ bins: ' + ', '.join(['{0:.2f}'.format(x) for x in self._qbins]) + \
                  '\nTheta bins: {0:.2f} {1:.2f} {2:.2f}'.format(min_i_theta * dtheta, dtheta, max_i_theta * dtheta)
        logger.notice(message)

        self.setProperty("OutputWorkspace", api.mtd[wn_sqes])
Example #22
0
    def PyExec(self):
        config['default.facility'] = "SNS"
        config['default.instrument'] = self._long_inst
        self._doIndiv = self.getProperty("DoIndividual").value
        self._etBins = self.getProperty(
            "EnergyBins").value / MICROEV_TO_MILLIEV
        self._qBins = self.getProperty("MomentumTransferBins").value
        self._noMonNorm = self.getProperty("NoMonitorNorm").value
        self._maskFile = self.getProperty("MaskFile").value
        self._groupDetOpt = self.getProperty("GroupDetectors").value
        self._normalizeToFirst = self.getProperty("NormalizeToFirst").value
        # NB: reads the "GroupDetectors" property, which looks like a
        # copy-paste slip; a dedicated property was probably intended
        self._normalizeToVanadium = self.getProperty("GroupDetectors").value
        self._doNorm = self.getProperty("DivideByVanadium").value

        datasearch = config["datasearch.searcharchive"]
        if datasearch != "On":
            config["datasearch.searcharchive"] = "On"

        # Handle masking file override if necessary
        self._overrideMask = bool(self._maskFile)
        if not self._overrideMask:
            config.appendDataSearchDir(DEFAULT_MASK_GROUP_DIR)
            self._maskFile = DEFAULT_MASK_FILE

        api.LoadMask(Instrument='BASIS',
                     OutputWorkspace='BASIS_MASK',
                     InputFile=self._maskFile)

        # Work around length issue
        _dMask = api.ExtractMask('BASIS_MASK')
        self._dMask = _dMask[1]
        api.DeleteWorkspace(_dMask[0])

        ############################
        ##  Process the Vanadium  ##
        ############################

        norm_runs = self.getProperty("NormRunNumbers").value
        if self._doNorm and bool(norm_runs):
            if ";" in norm_runs:
                raise SyntaxError("Normalization does not support run groups")
            self._doNorm = self.getProperty("NormalizationType").value
            self.log().information("Divide by Vanadium with normalization" +
                                   self._doNorm)

            # The following steps are common to all types of Vanadium normalization

            # norm_runs encompasses a single set, thus _getRuns returns
            # a list of only one item
            norm_set = self._getRuns(norm_runs, doIndiv=False)[0]
            self._normWs = self._sum_and_calibrate(norm_set,
                                                   extra_extension="_norm")

            # This rebin integrates counts onto a histogram of a single bin
            if self._doNorm == "by detectorID":
                normRange = self.getProperty("NormWavelengthRange").value
                self._normRange = [
                    normRange[0], normRange[1] - normRange[0], normRange[1]
                ]
                api.Rebin(InputWorkspace=self._normWs,
                          OutputWorkspace=self._normWs,
                          Params=self._normRange)

            # FindDetectorsOutsideLimits to be substituted by MedianDetectorTest
            api.FindDetectorsOutsideLimits(InputWorkspace=self._normWs,
                                           OutputWorkspace="BASIS_NORM_MASK")

            # additional reduction steps when normalizing by Q slice
            if self._doNorm == "by Q slice":
                self._normWs = self._group_and_SofQW(self._normWs,
                                                     self._etBins,
                                                     isSample=False)

        ##########################
        ##  Process the sample  ##
        ##########################
        self._run_list = self._getRuns(self.getProperty("RunNumbers").value,
                                       doIndiv=self._doIndiv)
        for run_set in self._run_list:
            self._samWs = self._sum_and_calibrate(run_set)
            self._samWsRun = str(run_set[0])
            # Mask detectors with insufficient Vanadium signal
            if self._doNorm:
                api.MaskDetectors(Workspace=self._samWs,
                                  MaskedWorkspace='BASIS_NORM_MASK')
            # Divide by Vanadium
            if self._doNorm == "by detector ID":
                api.Divide(LHSWorkspace=self._samWs,
                           RHSWorkspace=self._normWs,
                           OutputWorkspace=self._samWs)
            # additional reduction steps
            self._samSqwWs = self._group_and_SofQW(self._samWs,
                                                   self._etBins,
                                                   isSample=True)
            # Divide by Vanadium
            if self._doNorm == "by Q slice":
                api.Integration(InputWorkspace=self._normWs,
                                OutputWorkspace=self._normWs,
                                RangeLower=DEFAULT_VANADIUM_ENERGY_RANGE[0],
                                RangeUpper=DEFAULT_VANADIUM_ENERGY_RANGE[1])
                api.Divide(LHSWorkspace=self._samSqwWs,
                           RHSWorkspace=self._normWs,
                           OutputWorkspace=self._samSqwWs)
            # Clear mask from reduced file. Needed for binary operations
            # involving this S(Q,w)
            api.ClearMaskFlag(Workspace=self._samSqwWs)
            # Scale so that elastic line has Y-values ~ 1
            if self._normalizeToFirst:
                self._ScaleY(self._samSqwWs)
            # Output Dave and Nexus files
            extension = "_divided.dat" if self._doNorm else ".dat"
            dave_grp_filename = self._makeRunName(self._samWsRun,
                                                  False) + extension
            api.SaveDaveGrp(Filename=dave_grp_filename,
                            InputWorkspace=self._samSqwWs,
                            ToMicroEV=True)
            extension = "_divided_sqw.nxs" if self._doNorm else "_sqw.nxs"
            processed_filename = self._makeRunName(self._samWsRun,
                                                   False) + extension
            api.SaveNexus(Filename=processed_filename,
                          InputWorkspace=self._samSqwWs)
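Examples #22 and #25 share the ExtractMask idiom: the mask workspace is reduced to a plain detector-ID list so it can be handed to MaskDetectors run by run. A hedged sketch of the idiom, assuming a BASIS mask XML file is on the data search path (the file name is illustrative):

from mantid import simpleapi as api

api.LoadMask(Instrument='BASIS',
             InputFile='BASIS_Mask.xml',  # illustrative file name
             OutputWorkspace='BASIS_MASK')
mask_ws, detector_ids = api.ExtractMask('BASIS_MASK')
api.DeleteWorkspace(mask_ws)  # keep only the detector-ID list
# per-run usage would then be:
# api.MaskDetectors(Workspace=sample_ws, DetectorList=detector_ids)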
Example #23
0
    def run_bilby_reduction(self):
        # Read input csv file and define / create a folder for the output data
        csv_files_to_reduce_list = mantid_api.FileFinder.getFullPath(
            self.current_reduction_settings[0]["csv_file_name"])
        reduced_files_path = self.setup_save_out_path(csv_files_to_reduce_list)

        # Wavelength binning
        binning_wavelength_ini_str = self.retrieve_reduction_settings(
            "binning_wavelength_ini",
            raise_exception=True,
            message="binning_wavelength_ini cannot be empty")

        binning_wavelength_ini = BilbyCustomFunctions_Reduction.read_convert_to_float(
            binning_wavelength_ini_str)
        binning_wavelength_ini_original = binning_wavelength_ini

        # WAVELENGTH RANGE FOR TRANSMISSION: the aim is to fit the transmission
        # over the whole range and take only part of it for the data reduction;
        # it must be equal to or longer than binning_wavelength_ini
        binning_wavelength_transmission_str = self.current_reduction_settings[
            0]["binning_wavelength_transmission"]
        binning_wavelength_transmission = BilbyCustomFunctions_Reduction.read_convert_to_float(
            binning_wavelength_transmission_str)
        binning_wavelength_transmission_original = binning_wavelength_transmission

        # Check the wavelength ranges: the transmission range must be equal
        # to or longer than the wavelength binning range for data reduction
        if (binning_wavelength_ini[0] < binning_wavelength_transmission[0]
            ) or (binning_wavelength_ini[2] >
                  binning_wavelength_transmission[2]):
            raise ValueError(
                "Range for transmission binning shall be equal to or wider than"
                " the range for the sample wavelength binning (refer to line 94)")

        # Binning for Q
        binning_q_str = self.current_reduction_settings[0]["binning_q"]
        binning_q = BilbyCustomFunctions_Reduction.read_convert_to_float(
            binning_q_str)

        RadiusCut = self.retrieve_reduction_settings("RadiusCut", default=0.0)
        WaveCut = self.retrieve_reduction_settings("WaveCut", default=0.0)

        # Transmission fit parameters
        transmission_fit_ini = self.current_reduction_settings[0][
            "transmission_fit"]
        if (transmission_fit_ini != "Linear") and (
                transmission_fit_ini != "Log") and (transmission_fit_ini !=
                                                    "Polynomial"):
            raise ValueError("Check value of transmission_fit; it can be only"
                             " \"Linear\", \"Log\" or \"Polynomial\","
                             " first letter is mandatory capital")

        PolynomialOrder = self.current_reduction_settings[0]["PolynomialOrder"]

        # Wavelength interval: if reduction on wavelength intervals is needed
        wavelength_interval_input = self.current_reduction_settings[0][
            "wavelength_intervals"].lower()
        wavelength_intervals = BilbyCustomFunctions_Reduction.string_boolean(
            wavelength_interval_input)
        wavelength_intervals_original = wavelength_intervals
        wav_delta = 0.0  # set the value, needed for the "wavelengh_slices" function

        if self.reduce_2D:
            print(
                "Performing 2D reduction. The Q interval and number of points"
                " are taken into account; Q-binning intervals are ignored.")
            number_data_points_2D = float(
                self.retrieve_reduction_settings(
                    "2D_number_data_points",
                    raise_exception=True,
                    message="Number of points shall be given"))

            plot_2D = self.current_reduction_settings[0]["plot_2D"].lower()
            plot_2D = BilbyCustomFunctions_Reduction.string_boolean(plot_2D)
            binning_q[1] = (
                binning_q[0] + binning_q[2]
            ) / number_data_points_2D  # To replace deltaQ from the input file
        else:
            plot_2D = None

        ######################################
        # Calling function to read given csv file
        parameters = BilbyCustomFunctions_Reduction.files_list_reduce(
            csv_files_to_reduce_list)
        files_to_reduce = BilbyCustomFunctions_Reduction.files_to_reduce(
            parameters, self.index_files_to_reduce)
        if len(files_to_reduce) == 0:
            raise ValueError(
                'Please check index_files_to_reduce; the chosen index does not exist'
            )

        # reduce requested files one by one
        for current_file in files_to_reduce:
            sam_file = current_file["Sample"] + '.tar'

            ws_sam, time_range = self.setup_time_range(current_file, sam_file)

            # To read the mode value: True - ToF; False - NVS; this will define some steps inside SANSDataProcessor
            try:
                external_mode = ws_sam.run().getProperty("is_tof").value
            except RuntimeError:  # old files lack the ToF/mono mode log
                external_mode = True

            # An internal frame source was used during data collection; it is
            # not always NVS only, as both NVS and choppers can run in this mode
            if (not external_mode):
                print(
                    "Internal frame source. Binning range is taken from the sample scattering data."
                )
                binning_wavelength_ini = (ws_sam.readX(0)[0],
                                          ws_sam.readX(0)[ws_sam.blocksize()] -
                                          ws_sam.readX(0)[0],
                                          ws_sam.readX(0)[ws_sam.blocksize()])
                binning_wavelength_transmission = binning_wavelength_ini
                if wavelength_intervals:
                    wavelength_intervals = False
                    print("NVS: monochromatic mode")
                    print(
                        "It makes no sense to reduce monochromatic data over"
                        " multiple wavelength slices;"
                        " \"wavelength_intervals\" value changed to False.")
            else:
                # important for the case when NVS data is being analysed first,
                # ie to be able to come back to the whole range & wavelength slices, if needed
                binning_wavelength_ini = binning_wavelength_ini_original
                binning_wavelength_transmission = binning_wavelength_transmission_original
                wavelength_intervals = wavelength_intervals_original
                if wavelength_intervals:
                    wav_delta = float(
                        self.current_reduction_settings[0]["wav_delta"]
                    )  # no need to read if the previous is false

            # empty beam scattering in transmission mode
            ws_emp_file = current_file["T_EmptyBeam"] + '.tar'
            mantid_api.LoadBBY(
                ws_emp_file, OutputWorkspace='ws_emp'
            )  # a transmission measurement, so the count time should be long

            # transmission workspaces and masks
            transm_file = current_file["T_Sample"] + '.tar'
            ws_tranSam = mantid_api.LoadBBY(transm_file)
            ws_tranEmp = mantid_api.LoadBBY(
                ws_emp_file)  # empty beam for transmission
            transm_mask = current_file["mask_transmission"] + '.xml'
            ws_tranMsk = mantid_api.LoadMask('Bilby', transm_mask)

            sam_mask_file = current_file["mask"] + '.xml'
            ws_samMsk = mantid_api.LoadMask('Bilby', sam_mask_file)

            # scaling: attenuation
            att_pos = float(ws_tranSam.run().getProperty("att_pos").value)

            scale = BilbyCustomFunctions_Reduction.attenuation_correction(
                att_pos, self.data_before_May_2016)
            print("scale, aka attenuation factor {}".format(scale))

            thickness = current_file["thickness [cm]"]

            # Cd / Al masks shift
            if self.correct_tubes_shift:
                BilbyCustomFunctions_Reduction.correction_tubes_shift(
                    ws_sam, self.path_tube_shift_correction)

            if self.data_before_2016:
                BilbyCustomFunctions_Reduction.det_shift_before_2016(ws_sam)

            # Blocked beam
            if self.blocked_beam:
                ws_blocked_beam = current_file["BlockedBeam"] + '.tar'
                ws_blk = mantid_api.LoadBBY(ws_blocked_beam)
                if self.correct_tubes_shift:
                    BilbyCustomFunctions_Reduction.correction_tubes_shift(
                        ws_blk, self.path_tube_shift_correction)
            else:
                ws_blk = None

            # Detector sensitivity
            ws_sen = None

            # empty beam normalisation
            mantid_api.MaskDetectors(
                "ws_emp", MaskedWorkspace=ws_tranMsk
            )  # does not have to be ws_tranMsk, can be a specific mask
            mantid_api.ConvertUnits("ws_emp",
                                    Target="Wavelength",
                                    OutputWorkspace='ws_emp')

            # wavelength intervals: building the binning_wavelength list
            binning_wavelength, n = BilbyCustomFunctions_Reduction.wavelengh_slices(
                wavelength_intervals, binning_wavelength_ini, wav_delta)

            # By now we know how many wavelength bins we have, so we shall run Q1D n times
            # -- Processing --
            suffix = '_' + current_file[
                "suffix"]  # is the same for all wavelength intervals
            suffix_2 = current_file["additional_description"]
            if suffix_2 != '':
                suffix += '_' + suffix_2

            plot1Dgraph = None

            for i in range(n):
                ws_emp_partial = mantid_api.Rebin("ws_emp",
                                                  Params=binning_wavelength[i])
                ws_emp_partial = mantid_api.SumSpectra(ws_emp_partial,
                                                       IncludeMonitors=False)

                base_output_name = self.get_base_output_name(
                    i, sam_file, binning_wavelength, time_range, suffix)

                # needed here, otherwise SANSDataProcessor replaces it with the "transmission_fit" string
                transmission_fit = transmission_fit_ini

                output_workspace, transmission_fit = mantid_api.BilbySANSDataProcessor(
                    InputWorkspace=ws_sam,
                    InputMaskingWorkspace=ws_samMsk,
                    BlockedBeamWorkspace=ws_blk,
                    EmptyBeamSpectrumShapeWorkspace=ws_emp_partial,
                    SensitivityCorrectionMatrix=ws_sen,
                    TransmissionWorkspace=ws_tranSam,
                    TransmissionEmptyBeamWorkspace=ws_tranEmp,
                    TransmissionMaskingWorkspace=ws_tranMsk,
                    ScalingFactor=scale,
                    SampleThickness=thickness,
                    FitMethod=transmission_fit,
                    PolynomialOrder=PolynomialOrder,
                    BinningWavelength=binning_wavelength[i],
                    BinningWavelengthTransm=binning_wavelength_transmission,
                    BinningQ=binning_q,
                    TimeMode=external_mode,
                    AccountForGravity=self.account_for_gravity,
                    SolidAngleWeighting=self.solid_angle_weighting,
                    RadiusCut=RadiusCut,
                    WaveCut=WaveCut,
                    WideAngleCorrection=self.wide_angle_correction,
                    Reduce2D=self.reduce_2D,
                    OutputWorkspace=base_output_name)

                if not self.reduce_2D:
                    BilbyCustomFunctions_Reduction.strip_NaNs(
                        output_workspace, base_output_name)

                self.plot_graphs(i, reduced_files_path, base_output_name,
                                 output_workspace, plot_2D, plot1Dgraph)
                self.save_out_files(reduced_files_path, base_output_name,
                                    output_workspace)

        # Return only after all files and wavelength slices are processed
        return output_workspace, transmission_fit
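Example #23 masks the empty-beam workspace with a mask workspace (rather than a detector list) before converting it to wavelength. A minimal sketch of that pairing, assuming Mantid with the BILBY instrument definition and illustrative file names:

from mantid import simpleapi as api

api.LoadBBY(Filename='empty_beam.tar', OutputWorkspace='ws_emp')
ws_msk = api.LoadMask(Instrument='Bilby', InputFile='beam_stop.xml')
api.MaskDetectors(Workspace='ws_emp', MaskedWorkspace=ws_msk)
api.ConvertUnits(InputWorkspace='ws_emp',
                 Target='Wavelength',
                 OutputWorkspace='ws_emp')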
Example #24
0
    def PyExec(self):
        """ Main execution body
        """
        input_ws = self.getProperty("InputWorkspace").value
        outws_name = self.getPropertyValue("OutputWorkspace")
        choice_tof = self.getProperty("ChoiceElasticTof").value

        run = input_ws.getRun()
        nb_hist = input_ws.getNumberHistograms()

        prog_reporter = Progress(self,
                                 start=0.0,
                                 end=1.0,
                                 nreports=nb_hist +
                                 1)  # extra call below when summing

        # find elastic time channel
        tof_elastic = np.zeros(nb_hist)
        channel_width = float(run.getLogData('channel_width').value)
        # t_el = epp_channel_number*channel_width + xmin
        t_el_default = float(
            run.getLogData('EPP').value) * channel_width + input_ws.readX(0)[0]

        if choice_tof == 'Geometry':
            # tof_elastic from the header of the raw datafile: starting guess for the peak position
            tof_elastic.fill(t_el_default)

        if choice_tof == 'FitSample':
            prog_reporter.report("Fit function")
            for idx in range(nb_hist):
                tof_elastic[idx] = api.FitGaussian(input_ws, idx)[0]

        if choice_tof == 'FitVanadium':
            vanaws = self.getProperty("VanadiumWorkspace").value
            prog_reporter.report("Fit function")
            for idx in range(nb_hist):
                tof_elastic[idx] = api.FitGaussian(vanaws, idx)[0]

        self.log().debug("Tel = " + str(tof_elastic))

        outws = api.CloneWorkspace(input_ws, OutputWorkspace=outws_name)

        # mask detectors with EPP=0
        zeros = np.where(tof_elastic == 0)[0]
        if len(zeros) > 0:
            self.log().warning("Detectors " + str(zeros) +
                               " have EPP=0 and will be masked.")
            api.MaskDetectors(outws, DetectorList=zeros)
            # it makes sense to convert units even for masked detectors; take the EPP guess for that
            for idx in zeros:
                tof_elastic[idx] = t_el_default

        instrument = outws.getInstrument()
        sample = instrument.getSample()
        factor = sp.constants.m_n * 1e+15 / sp.constants.eV

        # calculate new values for dataX and dataY
        for idx in range(nb_hist):
            prog_reporter.report("Setting %dth spectrum" % idx)
            det = instrument.getDetector(
                outws.getSpectrum(idx).getDetectorIDs()[0])
            xbins = input_ws.readX(idx)  # take bin boundaries
            tof = xbins[:-1] + 0.5 * channel_width  # take middle of each bin
            sdd = det.getDistance(sample)
            # calculate new I = t^3*I(t)/(factor*sdd^2*dt)
            dataY = input_ws.readY(idx) * tof**3 / (factor * channel_width *
                                                    sdd * sdd)
            dataE = input_ws.readE(idx) * tof**3 / (factor * channel_width *
                                                    sdd * sdd)
            outws.setY(idx, dataY)
            outws.setE(idx, dataE)
            # calculate dE = factor*0.5*sdd^2*(1/t_el^2 - 1/t^2)
            dataX = 0.5 * factor * sdd * sdd * (1 / tof_elastic[idx]**2 -
                                                1 / xbins**2)
            outws.setX(idx, dataX)

        outws.getAxis(0).setUnit('DeltaE')
        outws.setDistribution(True)

        self.setProperty("OutputWorkspace", outws)
Example #25
0
    def _PyExec(self):
        # Collect Flux Normalization
        if self.getProperty('DoFluxNormalization').value is True:
            self._flux_normalization_type =\
                self.getProperty('FluxNormalizationType').value
            if self._flux_normalization_type == 'Monitor':
                self._MonNorm = True

        self._reflection =\
            REFLECTIONS_DICT[self.getProperty('ReflectionType').value]
        self._doIndiv = self.getProperty('DoIndividual').value

        # micro-eV to milli-eV
        self._etBins = 1.E-03 * self.getProperty('EnergyBins').value
        self._qBins = self.getProperty('MomentumTransferBins').value
        self._qBins[0] -= self._qBins[1] / 2.0  # leftmost bin boundary
        self._qBins[2] += self._qBins[1] / 2.0  # rightmost bin boundary

        maskfile = self.getProperty('MaskFile').value
        self._maskFile = maskfile if maskfile else\
            pjoin(DEFAULT_MASK_GROUP_DIR, self._reflection['mask_file'])

        self._groupDetOpt = self.getProperty('GroupDetectors').value
        self._normalizeToFirst = self.getProperty('NormalizeToFirst').value
        self._doNorm = self.getProperty('DivideByVanadium').value

        # retrieve properties pertaining to saving to NXSPE file
        self._nsxpe_do = self.getProperty('SaveNXSPE').value
        if self._nsxpe_do:
            self._nxspe_psi_angle_log = self.getProperty('PsiAngleLog').value
            self._nxspe_offset = self.getProperty('PsiOffset').value

        # Apply default mask if not supplied by user; note that _maskFile was
        # already given a default above, so test the raw property value
        self._overrideMask = bool(maskfile)
        if not self._overrideMask:
            mantid_config.appendDataSearchDir(DEFAULT_MASK_GROUP_DIR)
            self._maskFile = self._reflection['mask_file']

        self._maskWs = tws('BASIS_MASK')
        sapi.LoadMask(Instrument='BASIS',
                      OutputWorkspace=self._maskWs,
                      InputFile=self._maskFile)

        # Work around length issue
        _dMask = sapi.ExtractMask(InputWorkspace=self._maskWs,
                                  OutputWorkspace=tws('ExtractMask'))
        self._dMask = _dMask[1]

        #
        #  Process the Vanadium
        #
        norm_runs = self.getProperty('NormRunNumbers').value
        if self._doNorm and bool(norm_runs):
            self._normalizationType = self.getProperty(
                'NormalizationType').value
            self.log().information('Divide by Vanadium with normalization ' +
                                   self._normalizationType)

            # Following steps common to all types of Vanadium normalization

            # norm_runs encompasses a single set, thus _getRuns returns
            # a list of only one item
            norm_set = self._get_runs(norm_runs, doIndiv=False)[0]
            normWs = tws(self._make_run_name(norm_set[0]) + '_vanadium')
            self._sum_and_calibrate(norm_set, normWs)

            normRange = self._reflection['vanadium_wav_range']
            bin_width = normRange[1] - normRange[0]
            # This rebin integrates counts onto a histogram of a single bin
            if self._normalizationType == 'by detector ID':
                self._normRange = [normRange[0], bin_width, normRange[1]]
                sapi.Rebin(InputWorkspace=normWs,
                           OutputWorkspace=normWs,
                           Params=self._normRange)
                self._normWs = normWs
            # FindDetectorsOutsideLimits could be substituted by MedianDetectorTest
            self._normMask = tws('BASIS_NORM_MASK')
            sapi.FindDetectorsOutsideLimits(
                InputWorkspace=normWs,
                LowThreshold=1.0 * bin_width,
                # do not count events outside this wavelength range
                RangeLower=normRange[0],
                RangeUpper=normRange[1],
                OutputWorkspace=self._normMask)
            # additional reduction steps when normalizing by Q slice
            if self._normalizationType == 'by Q slice':
                self._normWs = self._group_and_SofQW(normWs,
                                                     normWs,
                                                     self._etBins,
                                                     isSample=False)
        #
        #  Process the sample
        #
        self._run_list = self._get_runs(self.getProperty('RunNumbers').value,
                                        doIndiv=self._doIndiv)
        for run_set in self._run_list:
            self._samWs = tws(self._make_run_name(run_set[0]))
            self._sum_and_calibrate(run_set, self._samWs)
            self._samWsRun = str(run_set[0])
            # Divide by Vanadium detector ID, if pertinent
            if self._normalizationType == 'by detector ID':
                # Mask detectors with low Vanadium signal before dividing
                sapi.MaskDetectors(Workspace=self._samWs,
                                   MaskedWorkspace=self._normMask)
                sapi.Divide(LHSWorkspace=self._samWs,
                            RHSWorkspace=self._normWs,
                            OutputWorkspace=self._samWs)
            # additional reduction steps
            prefix = self._make_run_name(run_set[0])
            self._samSqwWs = self._group_and_SofQW(self._samWs,
                                                   prefix,
                                                   self._etBins,
                                                   isSample=True)
            # Divide by Vanadium Q slice, if pertinent
            if self._normalizationType == 'by Q slice':
                sapi.Divide(LHSWorkspace=self._samSqwWs,
                            RHSWorkspace=self._normWs,
                            OutputWorkspace=self._samSqwWs)
            # Clear mask from reduced file. Needed for binary operations
            # involving this S(Q,w)
            sapi.ClearMaskFlag(Workspace=self._samSqwWs)
            # Scale so that elastic line has Y-values ~ 1
            if self._normalizeToFirst:
                self._ScaleY(self._samSqwWs)

            # Transform the vertical axis (Q) to point data
            # Q-values are in X-axis now
            sapi.Transpose(InputWorkspace=self._samSqwWs,
                           OutputWorkspace=self._samSqwWs)
            # from histo to point
            sapi.ConvertToPointData(InputWorkspace=self._samSqwWs,
                                    OutputWorkspace=self._samSqwWs)
            # Q-values back to vertical axis
            sapi.Transpose(InputWorkspace=self._samSqwWs,
                           OutputWorkspace=self._samSqwWs)
            self.serialize_in_log(self._samSqwWs)  # store the call
            # Output Dave and Nexus files
            extension = '_divided.dat' if self._doNorm else '.dat'
            dave_grp_filename = self._make_run_name(self._samWsRun, False) + \
                extension
            sapi.SaveDaveGrp(Filename=dave_grp_filename,
                             InputWorkspace=self._samSqwWs,
                             ToMicroEV=True)
            extension = '_divided_sqw.nxs' if self._doNorm else '_sqw.nxs'
            processed_filename = self._make_run_name(self._samWsRun, False) + \
                extension
            sapi.SaveNexus(Filename=processed_filename,
                           InputWorkspace=self._samSqwWs)

            # additional output
            if self.getProperty('OutputSusceptibility').value:
                temperature = mtd[self._samSqwWs].getRun().\
                    getProperty(TEMPERATURE_SENSOR).getStatistics().mean
                samXqsWs = self._samSqwWs.replace('sqw', 'Xqw')
                sapi.ApplyDetailedBalance(InputWorkspace=self._samSqwWs,
                                          OutputWorkspace=samXqsWs,
                                          Temperature=str(temperature))
                sapi.ConvertUnits(InputWorkspace=samXqsWs,
                                  OutputWorkspace=samXqsWs,
                                  Target='DeltaE_inFrequency')
                self.serialize_in_log(samXqsWs)
                susceptibility_filename = processed_filename.replace(
                    'sqw', 'Xqw')
                sapi.SaveNexus(Filename=susceptibility_filename,
                               InputWorkspace=samXqsWs)
            if self.getProperty('OutputPowderSpectrum').value:
                self.generatePowderSpectrum()
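The susceptibility branch at the end of Example #25 applies the detailed-balance factor to S(Q,w) and relabels the energy axis in frequency units. A minimal sketch of those calls, assuming a reduced S(Q,w) workspace 'sam_sqw' and a sample temperature in Kelvin read elsewhere from the run logs:

from mantid import simpleapi as sapi

temperature = 290.0  # K, illustrative; normally taken from the run logs
sapi.ApplyDetailedBalance(InputWorkspace='sam_sqw',
                          OutputWorkspace='sam_Xqw',
                          Temperature=str(temperature))
sapi.ConvertUnits(InputWorkspace='sam_Xqw',
                  OutputWorkspace='sam_Xqw',
                  Target='DeltaE_inFrequency')
sapi.SaveNexus(Filename='sam_Xqw.nxs', InputWorkspace='sam_Xqw')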