Example #1
    def create_solid_angle_corrections(self, vanadium, run_details):
        """
        Creates the solid angle corrections from a vanadium run, only applicable on HRPD otherwise return None
        :param vanadium: The vanadium used to create this
        :param run_details: the run details of to use
        """
        if not self._inst_settings.do_solid_angle:
            return
        solid_angle = mantid.SolidAngle(InputWorkspace=vanadium)

        scale = mantid.CreateSingleValuedWorkspace(DataValue=100)
        correction = mantid.Multiply(LHSWorkspace=solid_angle,
                                     RHSWorkspace=scale)

        # Detector efficiency: vanadium counts per unit solid angle,
        # integrated over a fixed wavelength band.
        eff = mantid.Divide(LHSWorkspace=vanadium, RHSWorkspace=correction)
        eff = mantid.ConvertUnits(InputWorkspace=eff, Target='Wavelength')
        eff = mantid.Integration(InputWorkspace=eff,
                                 RangeLower=1.4,
                                 RangeUpper=3.0)

        correction = mantid.Multiply(LHSWorkspace=correction, RHSWorkspace=eff)
        scale = mantid.CreateSingleValuedWorkspace(DataValue=100000)
        correction = mantid.Divide(LHSWorkspace=correction, RHSWorkspace=scale)

        name = "sac" + common.generate_splined_name(run_details.run_number, [])
        path = run_details.van_paths

        mantid.SaveNexus(InputWorkspace=correction,
                         Filename=os.path.join(path, name))

        common.remove_intermediate_workspace(solid_angle)
        common.remove_intermediate_workspace(scale)
        common.remove_intermediate_workspace(eff)
        common.remove_intermediate_workspace(correction)
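The scalar arithmetic above (create a single-valued workspace, then Multiply or Divide by it) can also be written with Mantid's Scale algorithm, which is what Example #3 below does. A minimal sketch on stand-in data, assuming a working Mantid installation:

import mantid.simpleapi as mantid

# Stand-in data; any MatrixWorkspace would do here.
ws = mantid.CreateSampleWorkspace()

# Scaling via an intermediate single-valued workspace...
hundred = mantid.CreateSingleValuedWorkspace(DataValue=100)
scaled_a = mantid.Multiply(LHSWorkspace=ws, RHSWorkspace=hundred)

# ...is equivalent to a single Scale call, with no helper workspace to clean up.
scaled_b = mantid.Scale(InputWorkspace=ws, Factor=100, Operation='Multiply')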
Example #2
def correct_for_multiple_scattering(ws_name, first_spectrum, last_spectrum, sample_properties,
                                    transmission_guess, multiple_scattering_order, number_of_events,
                                    g_log, masses, mean_intensity_ratios):
    g_log.debug("Evaluating the Multiple Scattering Correction.")
    dens, trans = sapi.VesuvioThickness(Masses=masses, Amplitudes=mean_intensity_ratios,
                                        TransmissionGuess=transmission_guess, Thickness=0.1)
    _TotScattering, _MulScattering = sapi.VesuvioCalculateMS(ws_name, NoOfMasses=len(masses),
                                                             SampleDensity=dens.cell(9, 1),
                                                             AtomicProperties=sample_properties,
                                                             BeamRadius=2.5,
                                                             NumScatters=multiple_scattering_order,
                                                             NumEventsPerRun=int(number_of_events))
    data_normalisation = sapi.Integration(ws_name)
    simulation_normalisation = sapi.Integration("_TotScattering")
    for workspace in ("_MulScattering", "_TotScattering"):
        ws = sapi.mtd[workspace]
        for j in range(ws.getNumberHistograms()):
            for k in range(ws.blocksize()):
                # Set the errors from the Monte Carlo simulation to zero: such
                # uncertainties are not propagated. Use a high number of events
                # for final corrections!
                ws.dataE(j)[k] = 0.
        sapi.Divide(LHSWorkspace=workspace, RHSWorkspace=simulation_normalisation,
                    OutputWorkspace=workspace)
        sapi.Multiply(LHSWorkspace=workspace, RHSWorkspace=data_normalisation,
                      OutputWorkspace=workspace)
        sapi.RenameWorkspace(InputWorkspace=workspace, OutputWorkspace=str(ws_name) + workspace)
    safe_delete_ws(data_normalisation)
    safe_delete_ws(simulation_normalisation)
    safe_delete_ws(trans)
    safe_delete_ws(dens)
    return
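The Divide/Multiply pair above rescales each simulated spectrum so that its integral matches that of the measured data. A minimal sketch of the same pattern on synthetic workspaces (VesuvioCalculateMS itself needs real VESUVIO data, so stand-ins are used; assumes a working Mantid installation):

import mantid.simpleapi as sapi

data = sapi.CreateSampleWorkspace()       # stands in for the measured run
simulated = sapi.CreateSampleWorkspace()  # stands in for _TotScattering

data_norm = sapi.Integration(data)
sim_norm = sapi.Integration(simulated)

# Rescale the simulation so its integrated intensity matches the data,
# spectrum by spectrum.
matched = sapi.Divide(LHSWorkspace=simulated, RHSWorkspace=sim_norm)
matched = sapi.Multiply(LHSWorkspace=matched, RHSWorkspace=data_norm)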
Example #3
    def create_solid_angle_corrections(self, vanadium, run_details):
        """
        Creates the solid angle corrections from a vanadium run, only applicable on HRPD otherwise return None
        :param vanadium: The vanadium used to create this
        :param run_details: the run details of to use
        """
        settings = self._inst_settings
        if not settings.do_solid_angle:
            return
        solid_angle = mantid.SolidAngle(InputWorkspace=vanadium)
        solid_angle = mantid.Scale(InputWorkspace=solid_angle, Factor=100, Operation='Multiply')

        eff = mantid.Divide(LHSWorkspace=vanadium, RHSWorkspace=solid_angle)
        eff = mantid.ConvertUnits(InputWorkspace=eff, Target='Wavelength')
        integration_range = settings.eff_integration_range
        # use full range if no range is supplied
        integration_range = integration_range if integration_range is not None else (None, None)
        eff = mantid.Integration(InputWorkspace=eff,
                                 RangeLower=integration_range[0],
                                 RangeUpper=integration_range[1])

        correction = mantid.Multiply(LHSWorkspace=solid_angle, RHSWorkspace=eff)
        correction = mantid.Scale(InputWorkspace=correction, Factor=1e-5,
                                  Operation='Multiply')
        name = "sac" + common.generate_splined_name(run_details.run_number, [])
        path = run_details.van_paths

        mantid.SaveNexus(InputWorkspace=correction, Filename=os.path.join(path, name))
        common.remove_intermediate_workspace(eff)
        common.remove_intermediate_workspace(correction)
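The (None, None) fallback above works because mantid.simpleapi drops keyword arguments whose value is None, so Integration falls back to its defaults (the full x range). A minimal sketch of the two cases on a stand-in workspace, assuming a working Mantid installation:

import mantid.simpleapi as mantid

ws = mantid.CreateSampleWorkspace()  # stand-in spectrum

# Explicit band, as used for the HRPD efficiency integration.
banded = mantid.Integration(InputWorkspace=ws, RangeLower=1.4, RangeUpper=3.0)

# None-valued kwargs are dropped by simpleapi, so the full range is integrated.
full = mantid.Integration(InputWorkspace=ws, RangeLower=None, RangeUpper=None)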
Example #4
def _normalize_one_spectrum(single_spectrum_ws, spline, instrument):
    rebinned_spline = mantid.RebinToWorkspace(
        WorkspaceToRebin=spline,
        WorkspaceToMatch=single_spectrum_ws,
        StoreInADS=False)
    divided = mantid.Divide(LHSWorkspace=single_spectrum_ws,
                            RHSWorkspace=rebinned_spline,
                            StoreInADS=False)
    if instrument.get_instrument_prefix() == "GEM":
        values_replaced = mantid.ReplaceSpecialValues(InputWorkspace=divided,
                                                      NaNValue=0,
                                                      StoreInADS=False)
        # Crop based on the max between 1000 and 2000 TOF, as the vanadium
        # peak on GEM will always occur in this window
        complete = _crop_spline_to_percent_of_max(rebinned_spline,
                                                  values_replaced,
                                                  single_spectrum_ws, 1000,
                                                  2000)
    else:
        complete = mantid.ReplaceSpecialValues(
            InputWorkspace=divided,
            NaNValue=0,
            OutputWorkspace=single_spectrum_ws)

    if instrument.perform_abs_vanadium_norm():
        vanadium_material = spline.sample().getMaterial()
        v_number_density = vanadium_material.numberDensityEffective
        v_cross_section = vanadium_material.totalScatterXSection()
        vanadium_shape = spline.sample().getShape()
        # Number density is per Angstrom^3 and volume in m^3; the 1e30
        # conversion factor cancels in the sample/vanadium ratio below
        num_v_atoms = vanadium_shape.volume() * v_number_density

        sample_material = single_spectrum_ws.sample().getMaterial()
        sample_number_density = sample_material.numberDensityEffective
        sample_shape = single_spectrum_ws.sample().getShape()  # use the sample run's shape, not the spline's
        num_sample_atoms = sample_shape.volume() * sample_number_density

        abs_norm_factor = v_cross_section * num_v_atoms / \
                          (num_sample_atoms * 4 * math.pi)
        logger.notice(
            "Performing absolute normalisation, multiplying by factor=" +
            str(abs_norm_factor))
        # avoid "Variable invalidated, data has been deleted" error when debugging
        output_ws_name = single_spectrum_ws.name()
        abs_norm_factor_ws = mantid.CreateSingleValuedWorkspace(
            DataValue=abs_norm_factor, OutputWorkspace="__abs_norm_factor_ws")
        complete = mantid.Multiply(LHSWorkspace=complete,
                                   RHSWorkspace=abs_norm_factor_ws,
                                   OutputWorkspace=output_ws_name)

    return complete
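For reference, the factor computed above is abs_norm_factor = sigma_V * N_V / (4 * pi * N_sample), where sigma_V is the vanadium total scattering cross section and N counts the atoms of each material in the beam. A minimal numeric sketch with purely illustrative values (not instrument data):

import math

v_cross_section = 5.1      # barns, vanadium total scattering (illustrative)
num_v_atoms = 4.2e22       # vanadium atoms in the beam (illustrative)
num_sample_atoms = 1.3e22  # sample atoms in the beam (illustrative)

abs_norm_factor = v_cross_section * num_v_atoms / (num_sample_atoms * 4 * math.pi)
print(abs_norm_factor)  # the scalar applied via Multiply above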
Example #5
    def _correct_sample_can(self):
        """
        Correct for sample and container.
        """

        logger.information('Correcting sample and container')
        corrected_can_ws = '__corrected_can'

        factor_types = ['_ass']
        if self._use_can:
            factor_types.extend(['_acc', '_acsc', '_assc'])
        corr_unit = s_api.mtd[self._corrections + '_ass'].getAxis(0).getUnit().unitID()
        for f_type in factor_types:
            self._convert_units_wavelength(corr_unit,
                                           self._corrections + f_type,
                                           self._corrections + f_type,
                                           "Wavelength")

        if self._rebin_container_ws:
            s_api.RebinToWorkspace(
                WorkspaceToRebin=self._scaled_container_wavelength,
                WorkspaceToMatch=self._corrections + '_acc',
                OutputWorkspace=self._scaled_container_wavelength)

        # Acc: divide the container data by its self-attenuation factor
        s_api.Divide(LHSWorkspace=self._scaled_container_wavelength,
                     RHSWorkspace=self._corrections + '_acc',
                     OutputWorkspace=corrected_can_ws)

        # Acsc: re-attenuate the container for the sample-in-container
        # geometry, then subtract it from the sample
        s_api.Multiply(LHSWorkspace=corrected_can_ws,
                       RHSWorkspace=self._corrections + '_acsc',
                       OutputWorkspace=corrected_can_ws)
        s_api.Minus(LHSWorkspace=self._sample_ws_wavelength,
                    RHSWorkspace=corrected_can_ws,
                    OutputWorkspace=self._output_ws_name)

        # Assc: correct the container-subtracted data for sample attenuation
        # in the sample-in-container geometry
        s_api.Divide(LHSWorkspace=self._output_ws_name,
                     RHSWorkspace=self._corrections + '_assc',
                     OutputWorkspace=self._output_ws_name)

        for f_type in factor_types:
            self._convert_units_wavelength(corr_unit,
                                           self._corrections + f_type,
                                           self._corrections + f_type,
                                           corr_unit)

        s_api.DeleteWorkspace(corrected_can_ws)
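The sequence above is the standard Paalman-Pings container subtraction: scale the container measurement by Acsc/Acc to the sample-in-container geometry, subtract it from the sample measurement, then divide by Assc. A minimal numeric sketch of that algebra with illustrative values (not real attenuation factors):

# Illustrative intensities and attenuation factors (not real data).
sample_and_can = 100.0  # measured: sample inside the container
can_only = 30.0         # measured: empty container
a_cc = 0.90             # container attenuation, container-only geometry
a_csc = 0.85            # container attenuation, sample-in-container geometry
a_ssc = 0.80            # sample attenuation, sample-in-container geometry

# Mirror of the Divide/Multiply/Minus/Divide chain above.
corrected = (sample_and_can - can_only / a_cc * a_csc) / a_ssc
print(corrected)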
Example #6
    def calculate(self, reducer, wave_wksps=None):
        """
            Multiplies all the wavelength scalings into one workspace and all the detector
            dependent scalings into another workspace that can be used by ConvertToQ. It is
            important that the wavelength correction workspaces have a known
            distribution/non-distribution state
            @param reducer: settings used for this reduction
            @param wave_wksps: additional wavelength dependent correction workspaces to include
        """
        # avoid the mutable default argument pitfall and leave the caller's list untouched
        wave_wksps = list(wave_wksps) if wave_wksps else []
        for step in self._wave_steps:
            if step.output_wksp:
                wave_wksps.append(step.output_wksp)

        wave_adj = None
        for wksp in wave_wksps:
            # before the workspaces can be combined they all need to match
            api.RebinToWorkspace(WorkspaceToRebin=wksp,
                                 WorkspaceToMatch=reducer.output_wksp,
                                 OutputWorkspace=self.TMP_WORKSPACE_NAME)

            if not wave_adj:
                # first time around this loop
                wave_adj = self.WAVE_CORR_NAME
                api.RenameWorkspace(InputWorkspace=self.TMP_WORKSPACE_NAME,
                                    OutputWorkspace=wave_adj)
            else:
                api.Multiply(LHSWorkspace=self.TMP_WORKSPACE_NAME,
                             RHSWorkspace=wave_adj,
                             OutputWorkspace=wave_adj)

        # Read the pixel correction file.
        # The code below emulates function overloading: if a derived class
        # overrides self._load and self._load_params, custom loading
        # behaviour can be achieved.
        pixel_adj = ''
        if self._pixel_file:
            pixel_adj = self.PIXEL_CORR_NAME
            load_com = self._load + '(Filename="' + self._pixel_file + '",OutputWorkspace="' + pixel_adj + '"'
            if self._load_params:
                load_com += ',' + self._load_params
            load_com += ')'
            eval(load_com)

        if AnalysisDataService.doesExist(self.TMP_WORKSPACE_NAME):
            AnalysisDataService.remove(self.TMP_WORKSPACE_NAME)

        return wave_adj, pixel_adj
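The eval-based loader above builds a call string at runtime. The same "overloading" can be had without eval by resolving the algorithm on the simpleapi module; a sketch under the assumption that self._load names a simpleapi algorithm and that the extra parameters are supplied as a dict rather than a comma-separated string (both load_pixel_file and the file name below are hypothetical):

import mantid.simpleapi as api

def load_pixel_file(load_name, pixel_file, output_name, load_params=None):
    # Resolve the algorithm by name instead of building an eval() string.
    load_alg = getattr(api, load_name)
    kwargs = dict(load_params) if load_params else {}
    return load_alg(Filename=pixel_file, OutputWorkspace=output_name, **kwargs)

# e.g. load_pixel_file("LoadRKH", "pixel_corr.dat", "PIXEL_CORR_NAME")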