Example #1
def focus_whole(run_number, van_curves, van_int, full_inst_calib, focus_directory, focus_general, do_pre_process, params, time_period):
    """
    focus a whole run with no cropping

    @param run_number :: the run number to focus
    @param van_curves :: the path to the vanadium curves file
    @param van_int :: the path to the integrated vanadium file
    @param full_inst_calib :: workspace containing the full instrument calibration
    @param focus_directory :: the user specific focus directory to save to
    @param focus_general :: the general focus directory to save to
    @param do_pre_process :: whether or not to pre-process the run before focussing it
    @param params :: the rebin parameters for pre-processing
    @param time_period :: the time period for pre-processing

    """
    van_curves_ws, van_integrated_ws, ws_to_focus = _prepare_focus(run_number, van_curves, van_int, do_pre_process,
                                                                   params, time_period)
    # loop through both banks, focus and save them
    for bank_no in range(1, 3):
        sample_ws_clone = simple.CloneWorkspace(ws_to_focus)
        curves_ws_clone = simple.CloneWorkspace(van_curves_ws)
        tof_output_name = "engg_focus_output_bank_{}".format(bank_no)
        dspacing_output_name = tof_output_name + "_dSpacing"
        cal_file = NORTH_BANK_CAL if bank_no == 1 else SOUTH_BANK_CAL
        region_calib = 'engg_calibration_bank_1' if bank_no == 1 else 'engg_calibration_bank_2'
        df_kwarg = {"GroupingFileName": cal_file}
        _run_focus(input_workspace=sample_ws_clone, tof_output_name=tof_output_name, region_calib=region_calib,
                   vanadium_integration_ws=van_integrated_ws, vanadium_curves_ws=curves_ws_clone, df_kwarg=df_kwarg,
                   full_calib=full_inst_calib)
        _save_out(run_number, focus_directory, focus_general, tof_output_name, "ENGINX_{}_{}{{}}", str(bank_no))
        _save_out(run_number, focus_directory, focus_general, dspacing_output_name, "ENGINX_{}_{}{{}}", str(bank_no))
        simple.DeleteWorkspace(sample_ws_clone)
        simple.DeleteWorkspace(curves_ws_clone)
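A minimal usage sketch for the function above (not part of the original example): the run number, file paths, and calibration file name are placeholders, and _prepare_focus/_run_focus/_save_out must come from the same module.

from mantid import simpleapi as simple

full_calib = simple.Load("full_instrument_calibration.nxs")   # placeholder file name
focus_whole(run_number="123456",                              # placeholder run number
            van_curves="/path/to/van_curves.nxs",             # placeholder path
            van_int="/path/to/van_integrated.nxs",            # placeholder path
            full_inst_calib=full_calib,
            focus_directory="/user/focus/",
            focus_general="/general/focus/",
            do_pre_process=False,
            params=None,
            time_period=None)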
Example #2
def convert_to_y_space_and_symmetrise(ws_name, mass):
    # phenomenological rule of thumb to define the y-range for a given mass
    max_Y = np.ceil(2.5*mass+27)
    rebin_parameters = str(-max_Y)+","+str(2.*max_Y/120)+","+str(max_Y)
    # converting to y-space, rebinning, and defining a normalisation matrix to take into account the kinematic cut-off
    sapi.ConvertToYSpace(InputWorkspace=ws_name, Mass=mass, OutputWorkspace=ws_name+"_JoY", QWorkspace=ws_name+"_Q")
    ws = sapi.Rebin(InputWorkspace=ws_name+"_JoY", Params=rebin_parameters, FullBinsOnly=True, OutputWorkspace=ws_name+"_JoY")
    tmp = sapi.CloneWorkspace(InputWorkspace=ws_name+"_JoY")
    for j in range(tmp.getNumberHistograms()):
        for k in range(tmp.blocksize()):
            tmp.dataE(j)[k] = 0.
            if np.isnan(tmp.dataY(j)[k]):
                ws.dataY(j)[k] = 0.
                tmp.dataY(j)[k] = 0.
            if tmp.dataY(j)[k] != 0:
                tmp.dataY(j)[k] = 1.
    tmp = sapi.SumSpectra('tmp')
    sapi.SumSpectra(InputWorkspace=ws_name+"_JoY", OutputWorkspace=ws_name+"_JoY_sum")
    sapi.Divide(LHSWorkspace=ws_name+"_JoY_sum", RHSWorkspace="tmp", OutputWorkspace=ws_name+"_JoY_sum")
    # rewrite the temporary workspaces ws and tmp
    ws = sapi.mtd[ws_name+"_JoY_sum"]
    tmp = sapi.CloneWorkspace(InputWorkspace=ws_name+"_JoY_sum")
    for k in range(tmp.blocksize()):
        tmp.dataE(0)[k] = (ws.dataE(0)[k] + ws.dataE(0)[ws.blocksize()-1-k]) / 2.
        tmp.dataY(0)[k] = (ws.dataY(0)[k] + ws.dataY(0)[ws.blocksize()-1-k]) / 2.
    sapi.RenameWorkspace(InputWorkspace="tmp",OutputWorkspace=ws_name+"_JoY_sym")
    normalise_workspace(ws_name+"_JoY_sym")
    return max_Y
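The example above calls a normalise_workspace helper that is not shown. A minimal sketch of one plausible implementation, assuming the intent is to normalise the symmetrised spectrum by its own integral:

import mantid.simpleapi as sapi

def normalise_workspace(ws_name):
    # assumed behaviour: divide the spectrum by its integral
    # (the original helper is not shown in this listing)
    tmp_norm = sapi.Integration(InputWorkspace=ws_name)
    sapi.Divide(LHSWorkspace=ws_name, RHSWorkspace=tmp_norm, OutputWorkspace=ws_name)
    sapi.DeleteWorkspace(tmp_norm)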
Example #3
    def _convert_units_wavelength(self, unit, input_ws, output_ws, target):

        if unit != 'Wavelength':
            # Configure conversion
            if unit == 'dSpacing':
                emode = 'Elastic'
                efixed = 0.0
            elif unit == 'DeltaE':
                emode = 'Indirect'
                from IndirectCommon import getEfixed
                efixed = getEfixed(input_ws)
            else:
                s_api.CloneWorkspace(InputWorkspace=input_ws,
                                     OutputWorkspace=output_ws)
                #raise ValueError('Unit %s in sample workspace is not supported' % unit)

            if unit == 'dSpacing' or unit == 'DeltaE':
                # Do conversion
                # Use temporary workspace so we don't modify data
                s_api.ConvertUnits(InputWorkspace=input_ws,
                                   OutputWorkspace=output_ws,
                                   Target=target,
                                   EMode=emode,
                                   EFixed=efixed)

        else:
            # No need to convert
            s_api.CloneWorkspace(InputWorkspace=input_ws,
                                 OutputWorkspace=output_ws)
Example #4
def _focus_mode_trans(output_file_paths, atten, instrument,
                      calibrated_spectra):
    summed_ws = mantid.CloneWorkspace(InputWorkspace=calibrated_spectra[0])
    for i in range(1, 9):  # Add workspaces 2-9 to workspace 1
        summed_ws = mantid.Plus(LHSWorkspace=summed_ws,
                                RHSWorkspace=calibrated_spectra[i])

    summed_ws = mantid.Scale(InputWorkspace=summed_ws,
                             Factor=0.111111111111111)

    if atten:
        # Clone a workspace which is not attenuated
        no_att = output_file_paths["output_name"] + "_noatten"
        mantid.CloneWorkspace(InputWorkspace=summed_ws, OutputWorkspace=no_att)

        summed_ws = mantid.ConvertUnits(InputWorkspace=summed_ws,
                                        Target="dSpacing")
        summed_ws = instrument._attenuate_workspace(summed_ws)
        summed_ws = mantid.ConvertUnits(InputWorkspace=summed_ws, Target="TOF")

    mantid.SaveGSS(InputWorkspace=summed_ws,
                   Filename=output_file_paths["gss_filename"],
                   Append=False,
                   Bank=1)
    mantid.SaveFocusedXYE(InputWorkspace=summed_ws,
                          Filename=output_file_paths["tof_xye_filename"],
                          Append=False,
                          IncludeHeader=False)

    summed_ws = mantid.ConvertUnits(InputWorkspace=summed_ws,
                                    Target="dSpacing")

    # Rename to user friendly name:
    summed_ws_name = output_file_paths["output_name"] + "_mods1-9"
    summed_ws = mantid.RenameWorkspace(InputWorkspace=summed_ws,
                                       OutputWorkspace=summed_ws_name)

    mantid.SaveFocusedXYE(InputWorkspace=summed_ws,
                          Filename=output_file_paths["dspacing_xye_filename"],
                          Append=False,
                          IncludeHeader=False)
    mantid.SaveNexus(InputWorkspace=summed_ws,
                     Filename=output_file_paths["nxs_filename"],
                     Append=False)

    output_list = [summed_ws]

    for i in range(0, 9):
        workspace_name = output_file_paths["output_name"] + "_mod" + str(i + 1)
        to_save = mantid.ConvertUnits(InputWorkspace=calibrated_spectra[i],
                                      Target="dSpacing",
                                      OutputWorkspace=workspace_name)
        output_list.append(to_save)
        mantid.SaveNexus(Filename=output_file_paths["nxs_filename"],
                         InputWorkspace=to_save,
                         Append=True)

    return output_list
Example #5
def focus_cropped(run_number, van_curves, van_int, full_inst_calib, focus_directory, focus_general, do_pre_process, params, time_period,
                  crop_on,
                  use_spectra):
    """
    focus a partial run, cropping either on banks or on specific spectra

    @param van_curves :: the path to the vanadium curves file
    @param van_int :: the path to the integrated vanadium file
    @param full_inst_calib :: workspace containing the full instrument calibration
    @param run_number :: the run number to focus
    @param focus_directory :: the user specific focus directory to save to
    @param focus_general :: the general focus directory to save to
    @param do_pre_process :: whether or not to pre-process the run before focussing it
    @param params :: the rebin parameters for pre-processing
    @param time_period :: the time period for pre-processing
    @param crop_on :: the bank or spectra to crop on
    @param use_spectra :: whether to focus by spectra or banks

    """
    van_curves_ws, van_integrated_ws, ws_to_focus = _prepare_focus(run_number, van_curves, van_int, do_pre_process,
                                                                   params, time_period)
    tof_output_name = "engg_focus_output{0}{1}"
    sample_ws_clone = simple.CloneWorkspace(ws_to_focus)
    curves_ws_clone = simple.CloneWorkspace(van_curves_ws)
    # check whether to crop on bank or spectra
    if not use_spectra:
        # get the bank to crop on, focus and save it out
        bank = {"North": "1",
                "South": "2"}
        bank_no = bank.get(crop_on)
        cal_file = NORTH_BANK_CAL if bank_no == "1" else SOUTH_BANK_CAL
        region_calib = 'engg_calibration_bank_1' if bank_no == "1" else 'engg_calibration_bank_2'
        df_kwarg = {"GroupingFileName": cal_file}
        tof_output_name = tof_output_name.format("_bank_", bank_no)
        dspacing_output_name = tof_output_name + "_dSpacing"
        _run_focus(input_workspace=sample_ws_clone, tof_output_name=tof_output_name, vanadium_integration_ws=van_integrated_ws,
                   vanadium_curves_ws=curves_ws_clone, df_kwarg=df_kwarg, full_calib=full_inst_calib,
                   region_calib=region_calib)
        _save_out(run_number, focus_directory, focus_general, tof_output_name, "ENGINX_{}_{}{{}}", crop_on)
        _save_out(run_number, focus_directory, focus_general, dspacing_output_name, "ENGINX_{}_{}{{}}", crop_on)
    else:
        # crop on the spectra passed in, focus and save it out
        tof_output_name = tof_output_name.format("_", "cropped")
        dspacing_output_name = tof_output_name + "_dSpacing"
        grp_ws = Utils.create_grouping_workspace_from_spectra_list(crop_on, ws_to_focus)
        df_kwarg = {"GroupingWorkspace": grp_ws}
        region_calib = 'engg_calibration_cropped'
        _run_focus(input_workspace=sample_ws_clone, tof_output_name=tof_output_name, vanadium_integration_ws=van_integrated_ws,
                   vanadium_curves_ws=curves_ws_clone, df_kwarg=df_kwarg, region_calib=region_calib,
                   full_calib=full_inst_calib)
        _save_out(run_number, focus_directory, focus_general, tof_output_name, "ENGINX_{}_bank_{}{{}}", "cropped")
        _save_out(run_number, focus_directory, focus_general, dspacing_output_name, "ENGINX_{}_bank_{}{{}}", "cropped")
    simple.DeleteWorkspace(sample_ws_clone)
    simple.DeleteWorkspace(curves_ws_clone)
Example #6
def stitch_reflectivity(reduction_list,
                        xs=None,
                        normalize_to_unity=True,
                        q_cutoff=0.01):
    """
        Stitch and normalize data sets

        :param string xs: name of the cross-section to use
        :param bool normalize_to_unity: if True, the specular ridge will be normalized to 1
    """
    if not reduction_list:
        return []

    # Select the cross-section we will use to determine the scaling factors
    if xs is None:
        xs = list(reduction_list[0].cross_sections.keys())[0]

    # First, determine the overall scaling factor as needed
    scaling_factor = 1.0
    if normalize_to_unity:
        idx_list = reduction_list[0].cross_sections[xs].q < q_cutoff
        total = 0
        weights = 0
        for i in range(len(reduction_list[0].cross_sections[xs]._r)):
            if idx_list[i]:
                w = 1.0 / float(reduction_list[0].cross_sections[xs]._dr[i])**2
                total += w * float(reduction_list[0].cross_sections[xs]._r[i])
                weights += w
        if weights > 0 and total > 0:
            scaling_factor = weights / total
        reduction_list[0].set_parameter("scaling_factor", scaling_factor)
    else:
        scaling_factor = reduction_list[0].cross_sections[
            xs].configuration.scaling_factor

    # Stitch the data sets together
    _previous_ws = None
    running_scale = scaling_factor
    scaling_factors = [running_scale]

    for i in range(len(reduction_list)):
        n_total = len(reduction_list[i].cross_sections[xs].q)
        p_0 = reduction_list[i].cross_sections[
            xs].configuration.cut_first_n_points
        p_n = n_total - reduction_list[i].cross_sections[
            xs].configuration.cut_last_n_points
        ws = api.CreateWorkspace(
            DataX=reduction_list[i].cross_sections[xs].q[p_0:p_n],
            DataY=reduction_list[i].cross_sections[xs]._r[p_0:p_n],
            DataE=reduction_list[i].cross_sections[xs]._dr[p_0:p_n])
        ws.setDistribution(True)
        ws = api.ConvertToHistogram(ws)
        if _previous_ws is not None:
            _, scale = api.Stitch1D(_previous_ws, ws)
            running_scale *= scale
            scaling_factors.append(running_scale)
            reduction_list[i].set_parameter("scaling_factor", running_scale)
        _previous_ws = api.CloneWorkspace(ws)

    return scaling_factors
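A self-contained sketch of the Stitch1D pattern used above, with synthetic data (all values here are made up for illustration): two overlapping point-data sets are converted to histograms and stitched, returning the combined curve and the scale applied to the right-hand set.

import numpy as np
import mantid.simpleapi as api

x1 = np.linspace(0.01, 0.05, 20)
x2 = np.linspace(0.04, 0.10, 20)
lhs = api.CreateWorkspace(DataX=x1, DataY=1.0 / x1**4, DataE=0.01 / x1**4)
rhs = api.CreateWorkspace(DataX=x2, DataY=0.5 / x2**4, DataE=0.01 / x2**4)
lhs.setDistribution(True)
rhs.setDistribution(True)
lhs = api.ConvertToHistogram(lhs)
rhs = api.ConvertToHistogram(rhs)
stitched, scale = api.Stitch1D(lhs, rhs)   # same call shape as in the loop above
print(scale)  # the factor by which the right-hand data set was scaled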
Example #7
    def PyExec(self):
        input_ws = self.getProperty('InputWorkspace').value
        output_ws = self.getPropertyValue('OutputWorkspace')

        num_specs = input_ws.getNumberHistograms()
        api.CloneWorkspace(InputWorkspace=input_ws, OutputWorkspace=output_ws)

        for i in range(0, num_specs):
            x_data = input_ws.readX(i)
            y_data = input_ws.readY(i)
            e_data = input_ws.readE(i)

            indexes = x_data.argsort()

            x_ordered = x_data[indexes]
            if input_ws.isHistogramData():
                max_index = np.argmax(indexes)
                indexes = np.delete(indexes, max_index)

            y_ordered = y_data[indexes]
            e_ordered = e_data[indexes]

            mtd[output_ws].setX(i, x_ordered)
            mtd[output_ws].setY(i, y_ordered)
            mtd[output_ws].setE(i, e_ordered)

        self.setProperty('OutputWorkspace', output_ws)
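The reordering above hinges on one numpy trick: for histogram data there is one more x value than y values, so after sorting x the index array must be shortened by dropping the slot that holds the largest original index. A standalone sketch:

import numpy as np

x = np.array([3.0, 1.0, 4.0, 2.0, 5.0])   # 5 bin edges, out of order
y = np.array([30.0, 10.0, 40.0, 20.0])    # 4 counts

indexes = x.argsort()
x_ordered = x[indexes]                             # sorted edges, length 5
indexes = np.delete(indexes, np.argmax(indexes))   # drop the slot holding index 4
y_ordered = y[indexes]                             # counts follow their edges, length 4
print(x_ordered, y_ordered)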
Example #8
    def _pre_process_corrections(self):
        """
        If the sample is not in wavelength then convert the corrections to
        whatever units the sample is in.
        """

        unit_id = s_api.mtd[self._sample_ws_wavelength].getAxis(
            0).getUnit().unitID()
        logger.information('x-unit is ' + unit_id)

        factor_types = ['ass']
        if self._use_can:
            factor_types.extend(['acc', 'acsc', 'assc'])

        for factor_type in factor_types:
            input_name = self._get_correction_factor_ws_name(factor_type)
            output_name = self._corrections + '_' + factor_type

            s_api.CloneWorkspace(InputWorkspace=input_name,
                                 OutputWorkspace=output_name)

        # Group the temporary factor workspaces (for easy removal later)
        s_api.GroupWorkspaces(InputWorkspaces=[
            self._corrections + '_' + f_type for f_type in factor_types
        ],
                              OutputWorkspace=self._corrections)
Example #9
def _run_focus(input_workspace,
               tof_output_name,
               vanadium_integration_ws,
               vanadium_curves_ws,
               df_kwarg,
               full_calib,
               region_calib):
    simple.NormaliseByCurrent(InputWorkspace=input_workspace, OutputWorkspace=input_workspace)
    input_workspace /= vanadium_integration_ws
    simple.ReplaceSpecialValues(InputWorkspace=input_workspace, OutputWorkspace=input_workspace, NaNValue=0,
                                InfinityValue=0)
    simple.ApplyDiffCal(InstrumentWorkspace=input_workspace, CalibrationWorkspace=full_calib)
    ws_d = simple.ConvertUnits(InputWorkspace=input_workspace, Target='dSpacing')
    focused_sample = simple.DiffractionFocussing(InputWorkspace=ws_d, **df_kwarg)
    curves_rebinned = simple.RebinToWorkspace(WorkspaceToRebin=vanadium_curves_ws, WorkspaceToMatch=focused_sample)
    normalised = simple.Divide(LHSWorkspace=focused_sample, RHSWorkspace=curves_rebinned,
                               AllowDifferentNumberSpectra=True)
    simple.ApplyDiffCal(InstrumentWorkspace=normalised, CalibrationWorkspace=region_calib)
    dspacing_output_name = tof_output_name + "_dSpacing"
    simple.CloneWorkspace(InputWorkspace=normalised, OutputWorkspace=dspacing_output_name)
    simple.ConvertUnits(InputWorkspace=normalised, OutputWorkspace=tof_output_name, Target='TOF')
    simple.DeleteWorkspace(curves_rebinned)
    simple.DeleteWorkspace(focused_sample)
    simple.DeleteWorkspace(normalised)
    simple.DeleteWorkspace(ws_d)
Example #10
0
    def calculate_ub_matrix(self, peak_info_list, a, b, c, alpha, beta, gamma):
        """
        Calculate UB matrix

        Set Miller index from raw data in Workspace2D.
        :param peak_info_list: list of PeakInfo objects to combine
        :param a:
        :param b:
        :param c:
        :param alpha:
        :param beta:
        :param gamma:
        :return:
        """
        # Check
        assert isinstance(peak_info_list, list)
        for peak_info in peak_info_list:
            if not isinstance(peak_info, PeakInfo):
                raise NotImplementedError('Input PeakList contains an element of type %s.' %
                                          str(type(peak_info)))

        if len(peak_info_list) < 2:
            return False, 'Too few peaks are input to calculate UB matrix.  Must be >= 2.'

        # Construct a new peak workspace by combining all single peak
        ub_peak_ws_name = 'Temp_UB_Peak'
        ub_peak_ws = api.CloneWorkspace(
            InputWorkspace=peak_info_list[0].get_peak_workspace(),
            OutputWorkspace=ub_peak_ws_name)

        for i_peak_info in range(1, len(peak_info_list)):
            # Set HKL as optional
            peak_ws = peak_info_list[i_peak_info].get_peak_workspace()

            # Combine peak workspace
            ub_peak_ws = api.CombinePeaksWorkspaces(
                LHSWorkspace=ub_peak_ws,
                RHSWorkspace=peak_ws,
                CombineMatchingPeaks=False,
                OutputWorkspace=ub_peak_ws_name)
        # END-FOR(i_peak_info)

        # Calculate UB matrix
        try:
            api.CalculateUMatrix(PeaksWorkspace=ub_peak_ws_name,
                                 a=a,
                                 b=b,
                                 c=c,
                                 alpha=alpha,
                                 beta=beta,
                                 gamma=gamma)
        except ValueError as val_err:
            return False, str(val_err)

        ub_matrix = ub_peak_ws.sample().getOrientedLattice().getUB()

        self._myLastPeakUB = ub_peak_ws

        return True, ub_matrix
Example #11
    def edit_matrix_workspace(sq_name,
                              scale_factor,
                              shift,
                              edited_sq_name=None):
        """
        Edit the matrix workspace of S(Q) by scaling and shift
        :param sq_name: name of the SofQ workspace
        :param scale_factor:
        :param shift:
        :param edited_sq_name: workspace for the edited S(Q)
        :return:
        """
        # get the workspace
        if AnalysisDataService.doesExist(sq_name) is False:
            raise RuntimeError(
                'S(Q) workspace {0} cannot be found in ADS.'.format(sq_name))

        if edited_sq_name is not None:
            simpleapi.CloneWorkspace(InputWorkspace=sq_name,
                                     OutputWorkspace=edited_sq_name)
            sq_ws = AnalysisDataService.retrieve(edited_sq_name)
        else:
            sq_ws = AnalysisDataService.retrieve(sq_name)

        # get the vector of Y
        sq_ws = sq_ws * scale_factor
        sq_ws = sq_ws + shift
        if edited_sq_name is not None and sq_ws.name() != edited_sq_name:
            simpleapi.DeleteWorkspace(Workspace=edited_sq_name)
            simpleapi.RenameWorkspace(InputWorkspace=sq_ws,
                                      OutputWorkspace=edited_sq_name)

        assert sq_ws is not None, 'S(Q) workspace cannot be None.'
        print('[DB...BAT] The edited S(Q) workspace is {0}'.format(sq_ws))
Example #12
    def crop_range(cls, ws, rng):
        """
        Given a range of workspace indexes to keep (rng), Crop the workspace such that these are kept.
        Arguments:

        ws : Workspace to crop spectrum from
        rng : Tuple of range tuples. syntax may be (start, stop) or ((start0, stop0), (start1, stop1), ...)

        returns a new copy of the workspace with spectra cropped out.
        """

        _in_rng = msi.CloneWorkspace(ws)
        if not isinstance(rng, tuple):
            raise ValueError("Elements must be tuples.")

        def is_actual_range(rng):
            return len(rng) == 2 and isinstance(rng[0], int) and isinstance(
                rng[1], int)

        if is_actual_range(rng):
            start, stop = rng[0], rng[1]
            _in_rng = msi.CropWorkspace(InputWorkspace=_in_rng,
                                        StartWorkspaceIndex=start,
                                        EndWorkspaceIndex=stop)
        else:
            for subrng in rng:
                _in_rng = ConvertToWavelength.crop_range(ws, subrng)

        return _in_rng
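A usage sketch for crop_range (assumed context: msi is mantid.simpleapi and the method is reachable on the ConvertToWavelength class). Keeping workspace indices 2 to 5 of a 9-spectrum workspace:

import mantid.simpleapi as msi

ws = msi.CreateSampleWorkspace(NumBanks=1, BankPixelWidth=3)  # 9 spectra
kept = ConvertToWavelength.crop_range(ws, (2, 5))
print(kept.getNumberHistograms())  # 4 spectra remain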
Example #13
    def makePhaseQuadTable(self, inputs):
        """
        generates a phase table from CalMuonDetectorPhases
        """
        cloned_workspace_CalMuon = mantid.CloneWorkspace(
            InputWorkspace=inputs["InputWorkspace"], StoreInADS=False)
        mantid.MaskDetectors(Workspace=cloned_workspace_CalMuon,
                             DetectorList=inputs['MaskedDetectors'],
                             StoreInADS=False)

        self.alg = mantid.AlgorithmManager.create("CalMuonDetectorPhases")
        self.alg.initialize()
        self.alg.setAlwaysStoreInADS(False)
        self.alg.setRethrows(True)

        self.alg.setProperty("FirstGoodData", inputs["FirstGoodData"])
        self.alg.setProperty("LastGoodData", inputs["LastGoodData"])

        self.alg.setProperty("InputWorkspace", cloned_workspace_CalMuon)
        self.alg.setProperty("DetectorTable", "PhaseTable")
        self.alg.setProperty("DataFitted", "fits")

        self.alg.execute()
        mantid.AnalysisDataService.addOrReplace(
            "PhaseTable",
            self.alg.getProperty("DetectorTable").value)
        self.alg = None
Example #14
def _sum_groups_of_three_ws(calibrated_spectra, output_file_names):
    workspace_list = []
    output_list = []
    for outer_loop_count in range(0, 3):
        # First clone workspaces 1/4/7
        pass_multiplier = (outer_loop_count * 3)
        workspace_names = "focus_mode_groups-" + str(pass_multiplier + 1)
        workspace_list.append(
            mantid.CloneWorkspace(
                InputWorkspace=calibrated_spectra[pass_multiplier],
                OutputWorkspace=workspace_names))
        # Then add workspaces 1+2+3 / 4+5+6 / 7+8+9
        for i in range(1, 3):
            input_ws_index = i + pass_multiplier  # Workspaces 2/3 * n
            inner_workspace_names = "focus_mode_groups-" + str(input_ws_index)
            workspace_list[outer_loop_count] = mantid.Plus(
                LHSWorkspace=workspace_list[outer_loop_count],
                RHSWorkspace=calibrated_spectra[input_ws_index],
                OutputWorkspace=inner_workspace_names)

        # Finally scale the output workspaces
        mod_first_number = str((outer_loop_count * 3) + 1)  # Generates 1/4/7
        mod_last_number = str((outer_loop_count + 1) * 3)  # Generates 3/6/9
        workspace_names = output_file_names[
            "output_name"] + "_mod" + mod_first_number + '-' + mod_last_number
        output_list.append(
            mantid.Scale(InputWorkspace=workspace_list[outer_loop_count],
                         OutputWorkspace=workspace_names,
                         Factor=0.333333333333))
    for ws in workspace_list:
        remove_intermediate_workspace(ws)
    return output_list
Example #15
    def create_indexed_peaksworkspace(self, fractional_peaks, qs, hklm):
        """Create a PeaksWorkspace that contains indexed peak data.

        :param fractional_peaks: the peaks workspace containing peaks with
            fractional HKL values.
        :param qs: The set of modulation vectors determined
        :param hklm: the new higher dimensional miller indices to add.
        :returns: a peaks workspace with the indexed peak data
        """
        # pad to 6 columns so we can assume a (hkl) (mnp) layout
        hklm = np.pad(hklm,
                      pad_width=(0, 6 - hklm.shape[1]),
                      mode='constant',
                      constant_values=0)
        indexed = api.CloneWorkspace(fractional_peaks, StoreInADS=False)
        # save modulation vectors. ensure qs has 3 rows
        qs = np.pad(qs,
                    pad_width=((0, 3 - qs.shape[0]), (0, 0)),
                    mode='constant',
                    constant_values=0)
        lattice = fractional_peaks.sample().getOrientedLattice()
        lattice.setModVec1(V3D(*qs[0]))
        lattice.setModVec2(V3D(*qs[1]))
        lattice.setModVec3(V3D(*qs[2]))
        # save indices
        for row, peak in enumerate(indexed):
            row_indices = hklm[row]
            peak.setHKL(*row_indices[:3])
            peak.setIntMNP(V3D(*row_indices[3:]))

        return indexed
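The np.pad call is the key step above. Note that a scalar pad_width such as (0, 2) applies to every axis of a 2-D array (adding rows as well as columns); the per-axis form in this standalone sketch pads only the columns, which is what the (hkl)(mnp) layout needs:

import numpy as np

hklm = np.array([[1.0, 0.0, 2.0, 0.5],
                 [0.0, 1.0, 1.0, -0.5]])  # (h, k, l) plus one modulation index

padded = np.pad(hklm,
                pad_width=((0, 0), (0, 6 - hklm.shape[1])),  # pad columns only
                mode='constant',
                constant_values=0)
print(padded)   # each row is now (h, k, l, m, n, p)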
Example #16
    def test_convert_array_run_log_to_attrs(self):
        # Given a Mantid workspace with a run log
        import mantid.simpleapi as mantid
        target = mantid.CloneWorkspace(self.base_event_ws)
        log_name = "SampleTemp"
        self.assertTrue(
            target.run().hasProperty(log_name),
            f"Expected input workspace to have a {log_name} run log")

        # When the workspace is converted to a scipp data array
        d = scn.mantid.convert_EventWorkspace_to_data_array(target, False)

        # Then the data array contains the run log as an unaligned coord
        self.assertTrue(
            np.allclose(target.run()[log_name].value,
                        d.attrs[log_name].values.data.values),
            "Expected values in the unaligned coord to match "
            "the original run log from the Mantid workspace")
        self.assertEqual(d.attrs[log_name].values.unit, sc.units.K)
        self.assertTrue(
            np.array_equal(
                target.run()[log_name].times.astype('datetime64[ns]'),
                d.attrs[log_name].values.coords["time"].values),
            "Expected times in the unaligned coord to match "
            "the original run log from the Mantid workspace")
Example #17
    def test_alg_produces_correct_workspace_in_APS_from_python(self):
        dataX = numpy.linspace(start=1, stop=3, num=11)
        dataY = numpy.linspace(start=1, stop=3, num=10)
        workspace1_test = simpleapi.CreateWorkspace(DataX=dataX,
                                                    DataY=dataY,
                                                    NSpec=1)
        workspace2_test = simpleapi.CloneWorkspace(workspace1_test)

        ws1_name = "workspace1_test"
        ws2_name = "workspace2_test"

        self.assertTrue(ws1_name in mtd)
        self.assertTrue(ws2_name in mtd)

        outputWs_name = "message"

        if outputWs_name in mtd:
            simpleapi.DeleteWorkspace(outputWs_name)

        result, message = simpleapi.CompareWorkspaces(workspace1_test,
                                                      workspace2_test)

        self.assertTrue(outputWs_name in mtd)

        simpleapi.DeleteWorkspace(message)
        simpleapi.DeleteWorkspace(ws1_name)
        simpleapi.DeleteWorkspace(ws2_name)
Example #18
    def test_function_call_returns_tuple_when_a_single_argument_is_provided(
            self):
        dataX = numpy.linspace(start=1, stop=3, num=11)
        dataY = numpy.linspace(start=1, stop=3, num=10)
        workspace1_test = simpleapi.CreateWorkspace(DataX=dataX,
                                                    DataY=dataY,
                                                    NSpec=1)
        workspace2_test = simpleapi.CloneWorkspace(workspace1_test)

        ws1_name = "workspace1_test"
        ws2_name = "workspace2_test"

        self.assertTrue(ws1_name in mtd)
        self.assertTrue(ws2_name in mtd)

        outputWs_name = "message"

        if outputWs_name in mtd:
            simpleapi.DeleteWorkspace(outputWs_name)

        result = simpleapi.CompareWorkspaces(workspace1_test, workspace2_test)

        self.assertTrue(isinstance(result, tuple))

        simpleapi.DeleteWorkspace(ws1_name)
        simpleapi.DeleteWorkspace(ws2_name)
Example #19
    def makePhaseQuadTable(self, inputs):
        """
        generates a phase table from CalMuonDetectorPhases
        """
        mantid.CloneWorkspace(InputWorkspace=inputs["InputWorkspace"],
                              OutputWorkspace="__tmp__")
        mantid.MaskDetectors(Workspace="__tmp__",
                             DetectorList=inputs['MaskedDetectors'],
                             StoreInADS=False)

        self.alg = mantid.AlgorithmManager.create("CalMuonDetectorPhases")
        self.alg.initialize()
        self.alg.setRethrows(True)

        self.alg.setProperty("FirstGoodData", inputs["FirstGoodData"])
        self.alg.setProperty("LastGoodData", inputs["LastGoodData"])

        self.alg.setProperty("InputWorkspace", "__tmp__")
        self.alg.setProperty("DetectorTable", "PhaseTable")
        self.alg.setProperty("DataFitted", "fits")

        self.alg.execute()
        mantid.DeleteWorkspace("__tmp__")
        mantid.DeleteWorkspace("fits")
        self.alg = None
Example #20
    def PhaseQuad(self, inputs):
        """
        do the phaseQuad algorithm
        groups data into a single set
        """
        cloned_workspace = mantid.CloneWorkspace(
            InputWorkspace=inputs["InputWorkspace"], StoreInADS=False)
        mantid.MaskDetectors(Workspace=cloned_workspace,
                             DetectorList=inputs['MaskedDetectors'],
                             StoreInADS=False)
        mantid.CropWorkspace(InputWorkspace=cloned_workspace,
                             XMin=inputs['FirstGoodData'],
                             XMax=inputs['LastGoodData'],
                             OutputWorkspace='cropped_workspace_pre_phasequad')

        self.alg = mantid.AlgorithmManager.create("PhaseQuad")
        self.alg.initialize()
        self.alg.setRethrows(True)

        self.alg.setProperty("InputWorkspace",
                             'cropped_workspace_pre_phasequad')
        self.alg.setProperty("PhaseTable", "PhaseTable")
        self.alg.setProperty("OutputWorkspace", "__phaseQuad__")
        self.alg.execute()
        mantid.DeleteWorkspace("cropped_workspace_pre_phasequad")
        self.alg = None
Example #21
def convertToElasticQ(input_ws, output_ws=None):
    """
    Helper function to convert the spectrum axis of a sample to ElasticQ.

    @param input_ws - the name of the workspace to convert from
    @param output_ws - the name to call the converted workspace
    """

    if output_ws is None:
        output_ws = input_ws

    axis = s_api.mtd[input_ws].getAxis(1)
    if axis.isSpectra():
        e_fixed = getEfixed(input_ws)
        s_api.ConvertSpectrumAxis(input_ws, Target='ElasticQ', EMode='Indirect', EFixed=e_fixed,
                                  OutputWorkspace=output_ws)

    elif axis.isNumeric():
        # Check that units are Momentum Transfer
        if axis.getUnit().unitID() != 'MomentumTransfer':
            raise RuntimeError('Input must have axis values of Q')

        s_api.CloneWorkspace(input_ws, OutputWorkspace=output_ws)

    else:
        raise RuntimeError('Input workspace must have either spectra or numeric axis.')
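A brief usage sketch (hedged: the file name is a placeholder for an indirect-geometry reduced file; the input needs a spectrum axis plus a retrievable Efixed, or a numeric MomentumTransfer axis):

import mantid.simpleapi as s_api

s_api.Load(Filename='sample_red.nxs',        # placeholder reduced file
           OutputWorkspace='sample_red')
convertToElasticQ('sample_red', output_ws='sample_red_in_q')
print(s_api.mtd['sample_red_in_q'].getAxis(1).getUnit().unitID())  # MomentumTransfer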
Example #22
    def test_subtract_summed_runs(self):
        # Load a vanadium workspace for this test
        sample_empty_number = "100"
        ws_file_name = "POL" + sample_empty_number
        original_ws = mantid.Load(ws_file_name)
        mantid.AddSampleLog(Workspace=original_ws,
                            LogName='gd_prtn_chrg',
                            LogText="10.0",
                            LogType='Number')
        no_scale_ws = mantid.CloneWorkspace(
            InputWorkspace=original_ws,
            OutputWorkspace="test_subtract_sample_empty_ws")

        # Subtracting from self should equal 0
        returned_ws = common.subtract_summed_runs(
            ws_to_correct=no_scale_ws,
            instrument=ISISPowderMockInst(),
            empty_sample_ws_string=sample_empty_number)
        y_values = returned_ws.readY(0)
        for i in range(returned_ws.blocksize()):
            self.assertAlmostEqual(y_values[i], 0)

        # Check what happens when we specify a scale factor
        scaled_ws = common.subtract_summed_runs(
            ws_to_correct=original_ws,
            instrument=ISISPowderMockInst(),
            scale_factor=0.75,
            empty_sample_ws_string=sample_empty_number)
        scaled_y_values = scaled_ws.readY(0)
        self.assertAlmostEqual(scaled_y_values[2], 0.20257424)
        self.assertAlmostEqual(scaled_y_values[4], 0.31700152)
        self.assertAlmostEqual(scaled_y_values[7], 0.35193970)

        mantid.DeleteWorkspace(returned_ws)
        mantid.DeleteWorkspace(scaled_ws)
Example #23
def _focus_mode_groups(cycle_information, output_file_paths, save_range,
                       calibrated_spectra):
    output_list = []
    to_save = _sum_groups_of_three_ws(calibrated_spectra, output_file_paths)

    workspaces_4_to_9_name = output_file_paths["output_name"] + "_mods4-9"
    workspaces_4_to_9 = mantid.Plus(LHSWorkspace=to_save[1],
                                    RHSWorkspace=to_save[2])
    workspaces_4_to_9 = mantid.Scale(InputWorkspace=workspaces_4_to_9,
                                     Factor=0.5,
                                     OutputWorkspace=workspaces_4_to_9_name)
    to_save.append(workspaces_4_to_9)
    append = False
    index = 1
    for ws in to_save:
        if cycle_information["instrument_version"] == "new":
            mantid.SaveGSS(InputWorkspace=ws,
                           Filename=output_file_paths["gss_filename"],
                           Append=append,
                           Bank=index)
        elif cycle_information["instrument_version"] == "new2":
            mantid.SaveGSS(InputWorkspace=ws,
                           Filename=output_file_paths["gss_filename"],
                           Append=False,
                           Bank=index)

        workspace_names = ws.name()
        dspacing_ws = mantid.ConvertUnits(InputWorkspace=ws,
                                          OutputWorkspace=workspace_names,
                                          Target="dSpacing")
        remove_intermediate_workspace(ws)
        output_list.append(dspacing_ws)
        mantid.SaveNexus(Filename=output_file_paths["nxs_filename"],
                         InputWorkspace=dspacing_ws,
                         Append=append)
        append = True
        index += 1

    for i in range(0, save_range):
        monitor_ws_name = output_file_paths["output_name"] + "_mod" + str(i + 10)

        monitor_ws = calibrated_spectra[i + 9]
        to_save = mantid.CloneWorkspace(InputWorkspace=monitor_ws,
                                        OutputWorkspace=monitor_ws_name)

        mantid.SaveGSS(InputWorkspace=to_save,
                       Filename=output_file_paths["gss_filename"],
                       Append=True,
                       Bank=i + 5)
        to_save = mantid.ConvertUnits(InputWorkspace=to_save,
                                      OutputWorkspace=monitor_ws_name,
                                      Target="dSpacing")
        mantid.SaveNexus(Filename=output_file_paths["nxs_filename"],
                         InputWorkspace=to_save,
                         Append=True)

        output_list.append(to_save)

    return output_list
Example #24
def block_fit_ncp(par,first_spectrum,last_spectrum, masses,ws_name,fit_arguments, verbose,IPFile, g_log):
    g_log.notice( "\n"+ "Fitting Workspace: "+ str(ws_name))
    g_log.debug("Fitting parameters are given as: [Intensity Width Centre] for each NCP")
    widths = np.zeros((len(masses), last_spectrum-first_spectrum+1))
    intensities = np.zeros((len(masses), last_spectrum-first_spectrum+1))
    centres = np.zeros((len(masses), last_spectrum-first_spectrum+1))
    spectra = np.zeros((last_spectrum-first_spectrum+1))
    tof_fit_ws = sapi.CloneWorkspace(InputWorkspace=str(ws_name), OutputWorkspace=str(ws_name)+"_fit")
    for j, spectrum in enumerate(range(first_spectrum, last_spectrum+1)):
        data_x, data_y,data_e = load_workspace(ws_name , spectrum)
        ncp, fitted_par, result = fit_ncp(par, spectrum, masses, data_x, data_y, data_e, fit_arguments, IPFile, g_log)
        for bin in range(len(data_x)-1):
            tof_fit_ws.dataY(j)[bin] = ncp[bin]*(data_x[bin+1]-data_x[bin])
            tof_fit_ws.dataE(j)[bin] = 0.
        # Calculate the reduced chi2 from the fitting Cost function:
        reduced_chi2 = result.fun/(len(data_x) - len(par))
        if (reduced_chi2 > 1.e-3):
            g_log.debug( spectrum, fitted_par, "%.4g" % reduced_chi2)
        else:
            g_log.debug( spectrum, " ... skipping ...")
        npars = len(par)/len(masses)
        for m in range(len(masses)):
            if (reduced_chi2>1.e-3):
                index = int(npars*m)
                intensities[m][j]=float(fitted_par[index])
                widths[m][j]=float(fitted_par[index+1])
                centres[m][j]=float(fitted_par[index+2])
            else:
                widths[m][j]=None
                intensities[m][j]=None
                centres[m][j]=None

        spectra[j]=spectrum
    return spectra, widths, intensities, centres
Example #25
def _attenuate_workspace(output_file_paths, attenuated_ws,
                         attenuation_filepath):
    # Clone a workspace which is not attenuated
    no_att = output_file_paths["output_name"] + "_noatten"
    mantid.CloneWorkspace(InputWorkspace=attenuated_ws, OutputWorkspace=no_att)
    return pearl_algs.attenuate_workspace(
        attenuation_file_path=attenuation_filepath,
        ws_to_correct=attenuated_ws)
Example #26
def merge_reflectivity(reduction_list, xs, q_min=0.001, q_step=-0.01):
    """
        Combine the workspaces for a given cross-section into a single workspace.

        TODO: trim workspaces
            trim_first = [item.cross_sections[pol_state].configuration.cut_first_n_points for item in self.data_manager.reduction_list]
            trim_last = [item.cross_sections[pol_state].configuration.cut_last_n_points for item in self.data_manager.reduction_list]

    """
    ws_list = []
    scaling_factors = []
    q_max = q_min

    for i in range(len(reduction_list)):
        # If we couldn't calculate the reflectivity, we won't have a workspace available
        if reduction_list[i].cross_sections[xs].reflectivity_workspace is None:
            continue

        _, _q_max = reduction_list[i].get_q_range()
        q_max = max(q_max, _q_max)
        ws_name = str(
            reduction_list[i].cross_sections[xs].reflectivity_workspace)
        # Stitch1DMany only scales workspaces relative to the first one
        if i == 0:
            api.Scale(InputWorkspace=ws_name,
                      OutputWorkspace=ws_name + '_histo',
                      Factor=reduction_list[i].cross_sections[xs].
                      configuration.scaling_factor,
                      Operation='Multiply')
            api.ConvertToHistogram(InputWorkspace=ws_name + '_histo',
                                   OutputWorkspace=ws_name + '_histo')
        else:
            scaling_factors.append(reduction_list[i].cross_sections[xs].
                                   configuration.scaling_factor)
            api.ConvertToHistogram(InputWorkspace=ws_name,
                                   OutputWorkspace=ws_name + '_histo')
        ws_list.append(ws_name + '_histo')
        params = "%s, %s, %s" % (q_min, q_step, q_max)

    if len(ws_list) > 1:
        merged_ws, _ = api.Stitch1DMany(InputWorkspaces=ws_list,
                                        Params=params,
                                        UseManualScaleFactors=True,
                                        ManualScaleFactors=scaling_factors,
                                        OutputWorkspace=ws_name + "_merged")
    elif len(ws_list) == 1:
        merged_ws = api.CloneWorkspace(ws_list[0],
                                       OutputWorkspace=ws_name + "_merged")
    else:
        return None

    # Remove temporary workspaces
    for ws in ws_list:
        api.DeleteWorkspace(ws)

    api.SaveAscii(InputWorkspace=merged_ws, Filename="/tmp/test.txt")
    return merged_ws
Example #27
    def _clone(self, workspace):
        """
        Clones the specified workspace.
        :param workspace:   The workspace to clone.
        :return:            A clone of the specified workspace.
        """
        return s_api.CloneWorkspace(InputWorkspace=workspace,
                                    OutputWorkspace="cloned",
                                    StoreInADS=False)
Example #28
def focus_texture_mode(run_number, van_curves, van_int, full_inst_calib, focus_directory, focus_general, do_pre_process, params,
                       time_period, dg_file):
    """
    perform a texture mode focusing using the grouping csv file

    @param run_number :: the run number to focus
    @param van_curves :: the path to the vanadium curves file
    @param van_int :: the path to the integrated vanadium file
    @param full_inst_calib :: workspace containing the full instrument calibration
    @param focus_directory :: the user specific focus directory to save to
    @param focus_general :: the general focus directory to save to
    @param do_pre_process :: whether or not to pre-process the run before focussing it
    @param params :: the rebin parameters for pre-processing
    @param time_period :: the time period for pre-processing
    @param dg_file :: the grouping file to use for texture mode

    """
    van_curves_ws, van_integrated_ws, ws_to_focus = _prepare_focus(run_number, van_curves, van_int, do_pre_process,
                                                                   params, time_period)
    banks = {}
    # read the csv file to work out the banks
    # open the file so the csv reader handles newlines and encoding consistently
    with open(dg_file, 'r', newline='', encoding='utf-8') as grouping_file:
        group_reader = csv.reader(_decomment_csv(grouping_file), delimiter=',')

        for row in group_reader:
            banks.update({row[0]: ','.join(row[1:])})

    # loop through the banks described in the csv, focusing and saving them out
    for bank in banks:
        sample_ws_clone = simple.CloneWorkspace(ws_to_focus)
        curves_ws_clone = simple.CloneWorkspace(van_curves_ws)
        tof_output_name = "engg_focusing_output_ws_texture_bank_{}"
        tof_output_name = tof_output_name.format(bank)
        dspacing_output_name = tof_output_name + "_dSpacing"
        grp_ws = Utils.create_grouping_workspace_from_spectra_list(banks[bank], ws_to_focus)
        df_kwarg = {"GroupingWorkspace": grp_ws}
        _run_focus(input_workspace=sample_ws_clone, tof_output_name=tof_output_name, region_calib=full_inst_calib,
                   vanadium_curves_ws=curves_ws_clone, full_calib=full_inst_calib, df_kwarg=df_kwarg,
                   vanadium_integration_ws=van_integrated_ws)
        _save_out(run_number, focus_directory, focus_general, tof_output_name, "ENGINX_{}_texture_{}{{}}", bank)
        _save_out(run_number, focus_directory, focus_general, dspacing_output_name, "ENGINX_{}_texture_{}{{}}", bank)
        simple.DeleteWorkspace(sample_ws_clone)
        simple.DeleteWorkspace(curves_ws_clone)
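The example above relies on a _decomment_csv helper that is not shown. A minimal sketch of one plausible implementation, assuming '#' starts a comment:

def _decomment_csv(csv_file):
    # assumed behaviour: yield each line with any '#' comment stripped,
    # skipping lines that end up empty (the original helper is not shown)
    for row in csv_file:
        raw = row.split('#')[0].strip()
        if raw:
            yield raw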
Example #29
def subtract_other_masses(ws_name, widths, intensities, centres, spectra, masses, IPFile,g_log):
    hydrogen_ws = sapi.CloneWorkspace(InputWorkspace=ws_name)
    for index in range(len(spectra)): # for each spectrum
        data_x, data_y, data_e = load_workspace(ws_name , spectra[index]) # get the experimental data after the last correction
        for m in range(len(masses)-1): # for all the masses but the first (generally H)
            other_par = (intensities[m+1, index], widths[m+1, index], centres[m+1,index]) # define the input parameters to get the NCPs
            ncp = calculate_ncp(other_par, spectra[index], [masses[m+1]], data_x, IPFile, g_log)
            for bin in range(len(data_x)-1):
                hydrogen_ws.dataY(index)[bin] -= ncp[bin]*(data_x[bin+1]-data_x[bin])
    return hydrogen_ws
Example #30
    def test_set_sample(self):
        import mantid.simpleapi as mantid
        target = mantid.CloneWorkspace(self.base_event_ws)
        d = scn.mantid.convert_EventWorkspace_to_data_array(target, False)
        d.attrs["sample"].value.setThickness(3)
        # before
        self.assertNotEqual(3, target.sample().getThickness())
        target.setSample(d.attrs["sample"].value)
        # after
        self.assertEqual(3, target.sample().getThickness())