def fitted_in_tof(fitted_in_dspacing: Union[str, Workspace2D],
                  difc: Union[str, TableWorkspace],
                  output_workspace: str,
                  group_workspace: Union[str, WorkspaceGroup] = None):
    r"""
    Create a workspace of fitted spectra in TOF

    @param fitted_in_dspacing : workspace of fitted spectra in d-spacing
    @param difc : table of DIFC parameters
    @param output_workspace : name for the workspace of fitted spectra in TOF
    @param group_workspace : if provided, add `output_workspace` to `group_workspace`
    @returns reference to the `output_workspace`
    """
    dspacing_workspace, difc_workspace = mtd[str(fitted_in_dspacing)], mtd[str(difc)]
    # Validate that fitted_in_dspacing and difc have the same number of histograms
    error_message = f'{dspacing_workspace} and {difc_workspace} have different numbers of spectra'
    assert dspacing_workspace.getNumberHistograms() == difc_workspace.getNumberHistograms(), error_message
    # Multiply fitted_in_dspacing by difc (TOF = DIFC * d) and store the result in output_workspace
    output = Multiply(LHSWorkspace=difc_workspace, RHSWorkspace=dspacing_workspace,
                      OutputWorkspace=output_workspace)
    # If requested, add output_workspace to group_workspace
    if group_workspace is not None:
        mtd[str(group_workspace)].add(output_workspace)
    return output
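# --- Illustrative aside (not part of the routine above): a minimal numpy sketch of
# the d-spacing -> TOF conversion that the Multiply call performs, using the linear
# diffraction calibration relation TOF = DIFC * d. All values below are made up.
import numpy as np

difc = np.array([2800.0, 3100.0])            # hypothetical DIFC, one value per spectrum
d_centres = np.array([[1.0, 1.5, 2.0],       # fitted peak centres in d-spacing (Angstrom)
                      [1.0, 1.5, 2.0]])
tof = difc[:, np.newaxis] * d_centres        # elementwise product, spectrum by spectrum
print(tof[0])                                # [2800. 4200. 5600.] (microseconds)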
def correctForChopperOpenings(ws, directWS, names, cleanup, logging):
    """Correct reflectivity values if chopper openings between RB and DB differ."""
    def opening(instrumentName, logs, Xs):
        chopperGap = chopperPairDistance(logs, instrumentName)
        chopperPeriod = 60. / chopperSpeed(logs, instrumentName)
        openingAngle = chopperOpeningAngle(logs, instrumentName)
        return chopperGap * constants.m_n / constants.h / chopperPeriod * Xs * 1e-10 + openingAngle / 360.
    instrumentName = ws.getInstrument().getName()
    Xs = ws.readX(0)
    if ws.isHistogramData():
        Xs = (Xs[:-1] + Xs[1:]) / 2.
    reflectedOpening = opening(instrumentName, ws.run(), Xs)
    directOpening = opening(instrumentName, directWS.run(), Xs)
    corFactorWSName = names.withSuffix('chopper_opening_correction_factors')
    corFactorWS = CreateWorkspace(OutputWorkspace=corFactorWSName,
                                  DataX=ws.readX(0),
                                  DataY=directOpening / reflectedOpening,
                                  UnitX=ws.getAxis(0).getUnit().unitID(),
                                  ParentWorkspace=ws,
                                  EnableLogging=logging)
    correctedWSName = names.withSuffix('corrected_by_chopper_opening')
    correctedWS = Multiply(LHSWorkspace=ws,
                           RHSWorkspace=corFactorWS,
                           OutputWorkspace=correctedWSName,
                           EnableLogging=logging)
    cleanup.cleanup(corFactorWS)
    cleanup.cleanup(ws)
    return correctedWS
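# --- Illustrative aside: the opening fraction computed above is the time a neutron
# of wavelength lambda needs to fly across the chopper gap (via the de Broglie
# velocity v = h / (m_n * lambda)), expressed as a fraction of the chopper period,
# plus the opening angle as a fraction of a full turn. A hedged numeric sketch with
# made-up gap/speed/angle values:
from scipy import constants
import numpy as np

wavelengths = np.array([2.0, 5.0, 10.0])  # Angstrom, hence the 1e-10 in the formula

def opening_fraction(gap_m, speed_rpm, opening_deg):
    period = 60. / speed_rpm  # seconds per revolution, as in the function above
    return gap_m * constants.m_n / constants.h / period * wavelengths * 1e-10 + opening_deg / 360.

correction = opening_fraction(0.1, 1000., 45.) / opening_fraction(0.1, 1000., 44.)
print(correction)  # per-wavelength factors multiplying the reflected beam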
def monitorTransfit(self, files, foilType, divE):
    isFirstFile = True
    isSingleFile = len(files) == 1
    firstFileName = ""
    for file in files:
        discard, fileName = path.split(file)
        fnNoExt = path.splitext(fileName)[0]
        if isFirstFile:
            firstFileName = fnNoExt
        fileName_Raw = fnNoExt + '_raw'
        fileName_3 = fnNoExt + '_3'
        LoadRaw(Filename=file, OutputWorkspace=fileName_Raw)
        CropWorkspace(InputWorkspace=fileName_Raw, OutputWorkspace=fileName_Raw, XMin=100, XMax=19990)
        NormaliseByCurrent(InputWorkspace=fileName_Raw, OutputWorkspace=fileName_Raw)
        ExtractSingleSpectrum(InputWorkspace=fileName_Raw, OutputWorkspace=fileName_3, WorkspaceIndex=3)
        DeleteWorkspace(fileName_Raw)
        ConvertUnits(InputWorkspace=fileName_3, Target='Energy', OutputWorkspace=fileName_3)
        self.TransfitRebin(fileName_3, fileName_3, foilType, divE)
        if not isFirstFile:
            # Accumulate subsequent files onto the first one
            Plus(LHSWorkspace=firstFileName + '_3', RHSWorkspace=fileName_3,
                 OutputWorkspace=firstFileName + '_3')
            DeleteWorkspace(fileName_3)
        else:
            isFirstFile = False
    if isSingleFile:
        RenameWorkspace(InputWorkspace=firstFileName + '_3', OutputWorkspace=firstFileName + '_monitor')
    else:
        # Average the accumulated sum: multiply by the scale factor 1/N
        noFiles = len(files) ** (-1)
        CreateSingleValuedWorkspace(OutputWorkspace='scale', DataValue=noFiles)
        Multiply(LHSWorkspace=firstFileName + '_3', RHSWorkspace='scale',
                 OutputWorkspace=firstFileName + '_monitor')
        DeleteWorkspace('scale')
        DeleteWorkspace(firstFileName + '_3')
def PyExec(self):
    input_ws = self.getProperty("InputWorkspace").value
    eff_ws = self.getProperty("DetectorEfficiencyWorkspace").value
    transposed = Transpose(InputWorkspace=eff_ws, StoreInADS=False)
    efficiencies = transposed.extractY().flatten()
    errors = transposed.extractE().flatten()
    n_hist = input_ws.getNumberHistograms()
    if n_hist % efficiencies.size != 0:
        raise ValueError('Number of histograms in input workspace is not a multiple of '
                         'number of entries in detector efficiency workspace.')
    # Integer division: np.repeat requires an integral repeat count
    n_time_indexes = n_hist // efficiencies.size
    to_multiply = CreateWorkspace(DataY=np.repeat(efficiencies, n_time_indexes),
                                  DataE=np.repeat(errors, n_time_indexes),
                                  DataX=np.zeros(n_hist), NSpec=n_hist, StoreInADS=False)
    output = Multiply(LHSWorkspace=input_ws, RHSWorkspace=to_multiply,
                      OutputWorkspace=self.getPropertyValue("OutputWorkspace"))
    # In the output we should mask the detectors where the calibration constant is masked
    det_IDs = ''
    n_pixels_per_tube = eff_ws.getNumberHistograms()
    for spectrum in range(n_pixels_per_tube):
        if eff_ws.hasMaskedBins(spectrum):
            masked = eff_ws.maskedBinsIndices(spectrum)
            for bin_index in masked:  # avoid shadowing the builtin `bin`
                det_IDs += str(bin_index * n_pixels_per_tube + spectrum + 1) + ','
    if det_IDs:
        MaskDetectors(Workspace=output, DetectorList=det_IDs[:-1])
    self.setProperty("OutputWorkspace", output)
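# --- Illustrative aside: the layout np.repeat produces above. With per-pixel
# efficiencies [e0, e1] and n_time_indexes = 3, the repeated vector lines up
# with histograms grouped by time index:
import numpy as np
print(np.repeat(np.array([0.9, 1.1]), 3))  # [0.9 0.9 0.9 1.1 1.1 1.1]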
def performOperation(self):
    lhs_valid, rhs_valid, err_msg = self.validateInputs()
    if err_msg:
        return lhs_valid, rhs_valid, err_msg
    lhs_ws, rhs_ws = self._scale_input_workspaces()
    try:
        if self._operation == '+':
            if self._md_lhs or self._md_rhs:
                PlusMD(LHSWorkspace=lhs_ws, RHSWorkspace=rhs_ws, OutputWorkspace=self._output_ws)
            else:
                Plus(LHSWorkspace=lhs_ws, RHSWorkspace=rhs_ws, OutputWorkspace=self._output_ws)
        elif self._operation == '-':
            if self._md_lhs or self._md_rhs:
                MinusMD(LHSWorkspace=lhs_ws, RHSWorkspace=rhs_ws, OutputWorkspace=self._output_ws)
            else:
                Minus(LHSWorkspace=lhs_ws, RHSWorkspace=rhs_ws, OutputWorkspace=self._output_ws)
        elif self._operation == '*':
            if self._md_lhs or self._md_rhs:
                MultiplyMD(LHSWorkspace=lhs_ws, RHSWorkspace=rhs_ws, OutputWorkspace=self._output_ws)
            else:
                Multiply(LHSWorkspace=lhs_ws, RHSWorkspace=rhs_ws, OutputWorkspace=self._output_ws)
        elif self._operation == 'WM':
            if self._md_lhs or self._md_rhs:
                WeightedMeanMD(LHSWorkspace=lhs_ws, RHSWorkspace=rhs_ws, OutputWorkspace=self._output_ws)
            else:
                WeightedMean(InputWorkspace1=lhs_ws, InputWorkspace2=rhs_ws, OutputWorkspace=self._output_ws)
        else:
            if self._md_lhs or self._md_rhs:
                DivideMD(LHSWorkspace=lhs_ws, RHSWorkspace=rhs_ws, OutputWorkspace=self._output_ws)
            else:
                Divide(LHSWorkspace=lhs_ws, RHSWorkspace=rhs_ws, OutputWorkspace=self._output_ws)
    except (RuntimeError, ValueError) as err:
        return False, False, str(err)
    else:
        self._regularize_output_names(self._output_ws)
    finally:
        DeleteWorkspaces(WorkspaceList=[lhs_ws, rhs_ws])
    return True, True, ""
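# --- Illustrative aside: a possible compaction of the if/elif chain above into a
# dispatch table (a sketch, not the class's actual API). Each symbol maps to its
# (regular, MD) algorithm pair. Note that WeightedMean takes InputWorkspace1/2
# rather than LHSWorkspace/RHSWorkspace, so it would still need special-casing:
from mantid.simpleapi import (Plus, PlusMD, Minus, MinusMD,
                              Multiply, MultiplyMD, Divide, DivideMD)

_BINARY_OPS = {
    '+': (Plus, PlusMD),
    '-': (Minus, MinusMD),
    '*': (Multiply, MultiplyMD),
    '/': (Divide, DivideMD),
}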
def _three_factor_corrections_approximation(self, sample_workspace, container_workspace, factor_workspaces):
    acc = factor_workspaces['acc']
    acsc = factor_workspaces['acsc']
    assc = factor_workspaces['assc']
    subtrahend = Multiply(container_workspace, (acsc / acc), StoreInADS=False)
    difference = Minus(sample_workspace, subtrahend, StoreInADS=False)
    quotient = Divide(difference, assc, OutputWorkspace="__quotient")
    return quotient
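# --- Illustrative aside: the same three-factor Paalman-Pings arithmetic in plain
# numpy, corrected = (sample - container * A_csc/A_cc) / A_ssc, with made-up
# intensities and attenuation factors:
import numpy as np
sample, container = np.array([10.0, 12.0]), np.array([2.0, 2.5])
acc, acsc, assc = np.array([0.80, 0.78]), np.array([0.70, 0.69]), np.array([0.60, 0.58])
corrected = (sample - container * (acsc / acc)) / assc
print(corrected)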
def _createMaskWS(ws, name, algorithmLogging):
    """Return a single bin workspace with same number of histograms as ws."""
    extractResult = ExtractMask(InputWorkspace=ws,
                                OutputWorkspace=name,
                                EnableLogging=algorithmLogging)
    zeroWS = CreateSingleValuedWorkspace(DataValue=0.,
                                         ErrorValue=0.,
                                         EnableLogging=algorithmLogging,
                                         StoreInADS=False)
    # Multiplying by zero clears the extracted mask values, leaving an all-zero workspace
    maskWS = Multiply(LHSWorkspace=extractResult.OutputWorkspace,
                      RHSWorkspace=zeroWS,
                      OutputWorkspace=name,
                      EnableLogging=algorithmLogging)
    return maskWS
def checkResults(self, eventCheck=False, xsection="AttenuationXSection"):
    # Check results
    Multiply(LHSWorkspace=self._input_wksp,
             RHSWorkspace=self._correction_wksp,
             OutputWorkspace=self._output_wksp)
    output_wksp = AnalysisDataService.retrieve(self._output_wksp)
    self.assertEqual(output_wksp.getAxis(0).getUnit().unitID(), 'Wavelength')
    self.assertAlmostEqual(output_wksp.readX(0)[79], 0.995)
    if eventCheck:
        self.assertAlmostEqual(output_wksp.readY(0)[79], 66.23970242900438)
    else:
        if xsection == "AttenuationXSection":
            self.assertAlmostEqual(output_wksp.readY(0)[79], 3250.28183501)
        if xsection == "TotalXSection":
            self.assertAlmostEqual(output_wksp.readY(0)[79], 3245.70148939)
def _subtractEC(ws, ecWS, ecScaling, wsNames, wsCleanup, algorithmLogging):
    """Subtract empty container."""
    # out = in - ecScaling * EC
    scalingWSName = wsNames.withSuffix('ecScaling')
    scalingWS = CreateSingleValuedWorkspace(OutputWorkspace=scalingWSName,
                                            DataValue=ecScaling,
                                            EnableLogging=algorithmLogging)
    scaledECWSName = wsNames.withSuffix('scaled_EC')
    scaledECWS = Multiply(LHSWorkspace=ecWS,
                          RHSWorkspace=scalingWS,
                          OutputWorkspace=scaledECWSName,
                          EnableLogging=algorithmLogging)
    ecSubtractedWSName = wsNames.withSuffix('EC_subtracted')
    ecSubtractedWS = Minus(LHSWorkspace=ws,
                           RHSWorkspace=scaledECWS,
                           OutputWorkspace=ecSubtractedWSName,
                           EnableLogging=algorithmLogging)
    wsCleanup.cleanup(scalingWS)
    wsCleanup.cleanup(scaledECWS)
    return ecSubtractedWS
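# --- Illustrative aside: the workspace algebra above reduced to one line of
# numpy (made-up values): out = in - ecScaling * EC.
import numpy as np
ws, ecWS, ecScaling = np.array([5.0, 6.0]), np.array([1.0, 1.5]), 0.9
print(ws - ecScaling * ecWS)  # the empty-container-subtracted signal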
def PyExec(self):
    input_ws = self.getProperty("InputWorkspace").value
    eff_ws = self.getProperty("DetectorEfficiencyWorkspace").value
    transposed = Transpose(InputWorkspace=eff_ws, StoreInADS=False)
    efficiencies = transposed.extractY().flatten()
    errors = transposed.extractE().flatten()
    n_hist = input_ws.getNumberHistograms()
    if n_hist % efficiencies.size != 0:
        raise ValueError('Number of histograms in input workspace is not a multiple of '
                         'number of entries in detector efficiency workspace.')
    # Integer division: np.repeat requires an integral repeat count
    n_time_indexes = n_hist // efficiencies.size
    to_multiply = CreateWorkspace(DataY=np.repeat(efficiencies, n_time_indexes),
                                  DataE=np.repeat(errors, n_time_indexes),
                                  DataX=np.zeros(n_hist), NSpec=n_hist, StoreInADS=False)
    output = Multiply(LHSWorkspace=input_ws, RHSWorkspace=to_multiply,
                      OutputWorkspace=self.getPropertyValue("OutputWorkspace"))
    self.setProperty("OutputWorkspace", output)
def calc_absorption_corr_using_wksp(donor_wksp, abs_method, element_size=1,
                                    prefix_name="", cache_dirs=None):
    """
    Calculates absorption correction on the specified donor workspace. See the
    documentation for the ``calculate_absorption_correction`` function above
    for more details.

    :param donor_wksp: Input workspace to compute absorption correction on
    :param abs_method: Type of absorption correction: None, SampleOnly, SampleAndContainer, FullPaalmanPings
    :param element_size: Size of one side of the integration element cube in mm
    :param prefix_name: Optional prefix of the output workspaces, default is the donor_wksp name.
    :param cache_dirs: List of candidate cache directories to store cached abs workspace.
    :return: Two workspaces (A_s, A_c), the first for the sample and the second for the container
    """
    # Use None instead of a mutable default argument for the cache directory list
    if cache_dirs is None:
        cache_dirs = []
    if abs_method == "None":
        return "", ""
    if isinstance(donor_wksp, str):
        if not mtd.doesExist(donor_wksp):
            raise RuntimeError("Specified donor workspace not found in the ADS")
        donor_wksp = mtd[donor_wksp]
    absName = donor_wksp.name()
    if prefix_name != '':
        absName = prefix_name
    if abs_method == "SampleOnly":
        AbsorptionCorrection(donor_wksp, OutputWorkspace=absName + '_ass',
                             ScatterFrom='Sample', ElementSize=element_size)
        return absName + '_ass', ""
    elif abs_method == "SampleAndContainer":
        AbsorptionCorrection(donor_wksp, OutputWorkspace=absName + '_ass',
                             ScatterFrom='Sample', ElementSize=element_size)
        AbsorptionCorrection(donor_wksp, OutputWorkspace=absName + '_acc',
                             ScatterFrom='Container', ElementSize=element_size)
        return absName + '_ass', absName + '_acc'
    elif abs_method == "FullPaalmanPings":
        PaalmanPingsAbsorptionCorrection(donor_wksp, OutputWorkspace=absName, ElementSize=element_size)
        # Effective container correction: A_c = A_cc * A_ssc / A_csc
        Multiply(LHSWorkspace=absName + '_acc', RHSWorkspace=absName + '_assc',
                 OutputWorkspace=absName + '_ac')
        Divide(LHSWorkspace=absName + '_ac', RHSWorkspace=absName + '_acsc',
               OutputWorkspace=absName + '_ac')
        return absName + '_assc', absName + '_ac'
    else:
        raise ValueError("Unrecognized absorption correction method '{}'".format(abs_method))
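# --- Illustrative aside: why the FullPaalmanPings branch multiplies A_cc by A_ssc
# and divides by A_csc. With the effective factor A_c = A_cc * A_ssc / A_csc,
# dividing the container by A_c is algebraically the same as the three-factor
# correction (sample - container * A_csc/A_cc) / A_ssc once the sample has been
# divided by A_ssc. Made-up values:
import numpy as np
acc, assc, acsc = 0.80, 0.60, 0.70
sample, container = 10.0, 2.0
a_c = acc * assc / acsc
print(sample / assc - container / a_c)
print((sample - container * acsc / acc) / assc)  # same number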
def reduce_to_2theta(hb2b_builder, pixel_matrix, hb2b_data_ws_name, counts_array,
                     mask_vec, mask_ws_name, num_bins=1000):
    """ Reduce to 2theta with Masks
    :param hb2b_builder:
    :param pixel_matrix:
    :param hb2b_data_ws_name:
    :param counts_array:
    :param mask_vec:
    :param num_bins:
    :return:
    """
    # reduce by PyRS
    vec_counts = counts_array.astype('float64')

    # mask: zero out the masked pixels by elementwise multiplication
    if mask_vec is not None:
        print(mask_vec.dtype)
        vec_counts *= mask_vec.astype('float64')

    # reduce
    bin_edges, histogram = hb2b_builder.reduce_to_2theta_histogram(pixel_matrix, vec_counts, num_bins)

    # create workspace
    pyrs_reduced_name = '{}_pyrs_reduced'.format(hb2b_data_ws_name)
    CreateWorkspace(DataX=bin_edges, DataY=histogram, NSpec=1, OutputWorkspace=pyrs_reduced_name)
    SaveNexusProcessed(InputWorkspace=pyrs_reduced_name,
                       Filename='{}.nxs'.format(pyrs_reduced_name),
                       Title='PyRS reduced: {}'.format(hb2b_data_ws_name))

    # reduce by Mantid: transfer to 2theta for data
    two_theta_ws_name = '{}_2theta'.format(hb2b_data_ws_name)

    # mask by multiplying with the masking workspace
    if mask_ws_name:
        masked_ws_name = '{}_masked'.format(hb2b_data_ws_name)
        Multiply(LHSWorkspace=hb2b_data_ws_name, RHSWorkspace=mask_ws_name,
                 OutputWorkspace=masked_ws_name, ClearRHSWorkspace=False)
        hb2b_data_ws_name = masked_ws_name
        SaveNexusProcessed(InputWorkspace=hb2b_data_ws_name,
                           Filename='{}_raw.nxs'.format(hb2b_data_ws_name))
    # END-IF

    ConvertSpectrumAxis(InputWorkspace=hb2b_data_ws_name, OutputWorkspace=two_theta_ws_name, Target='Theta')
    Transpose(InputWorkspace=two_theta_ws_name, OutputWorkspace=two_theta_ws_name)

    # final: resample onto the requested binning
    mantid_reduced_name = '{}_mtd_reduced'.format(hb2b_data_ws_name)
    ResampleX(InputWorkspace=two_theta_ws_name, OutputWorkspace=mantid_reduced_name,
              NumberBins=num_bins, PreserveEvents=False)
    mantid_ws = mtd[mantid_reduced_name]
    SaveNexusProcessed(InputWorkspace=mantid_reduced_name,
                       Filename='{}.nxs'.format(mantid_reduced_name),
                       Title='Mantid reduced: {}'.format(hb2b_data_ws_name))
    plt.plot(mantid_ws.readX(0), mantid_ws.readY(0), color='blue', marker='o')

    plt.plot(bin_edges[:-1], histogram, color='red')
    plt.show()

    return
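# --- Illustrative aside: masking-by-multiplication, the trick the Multiply call
# above uses, in plain numpy (made-up counts; 0 marks a masked pixel):
import numpy as np
counts = np.array([4., 7., 3., 9.])
mask = np.array([1., 0., 1., 1.])
print(counts * mask)  # masked pixels are zeroed before histogramming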
eff_ws = 'efficiency'
CalculateEfficiencyCorrection(InputWorkspace=incident_ws, Alpha=-0.693, OutputWorkspace=eff_ws)
ConvertToPointData(InputWorkspace=eff_ws, OutputWorkspace=eff_ws)
ax_eff.plot(mtd[eff_ws], '-', color=color, wkspIndex=0, label=moderator + ' efficiency')

sample_ws = 'sample_ws'
Multiply(LHSWorkspace=incident_ws, RHSWorkspace=eff_ws, OutputWorkspace=sample_ws)
ax_bm.plot(mtd[sample_ws], 'o', color=color, wkspIndex=0, label=moderator + ' measurement')

ax_bm.legend()
ax_eff.legend()
plt.show()
exit()

# -----------------------------------------------------------------------------------------
# Fit incident spectrum