def PyExec(self):
    """Merge the input workspaces into a single summed spectrum.

    All inputs are rebinned onto a common grid, conjoined into one
    multi-spectrum workspace, scale/offset-matched against the spectrum
    with the largest x-range, cropped to the requested limits and summed.
    The result is written to the OutputWorkspace property.
    """
    # User-supplied properties.
    ws_list = self.getProperty('InputWorkspaces').value
    x_min = self.getProperty('XMin').value
    x_max = self.getProperty('XMax').value
    scale_bool = self.getProperty('CalculateScale').value
    offset_bool = self.getProperty('CalculateOffset').value
    # WorkspaceGroups are expanded into a flat list of workspaces.
    flattened_list = self.unwrap_groups(ws_list)
    # Reference spectrum index and the rebin parameters shared by all inputs.
    largest_range_spectrum, rebin_param = self.get_common_bin_range_and_largest_spectra(flattened_list)
    # Seed the conjoined workspace with the first input, then append the rest.
    CloneWorkspace(InputWorkspace=flattened_list[0], OutputWorkspace='ws_conjoined')
    Rebin(InputWorkspace='ws_conjoined', OutputWorkspace='ws_conjoined', Params=rebin_param)
    for ws in flattened_list[1:]:
        # Clone first so the original input workspace is left untouched.
        temp = CloneWorkspace(InputWorkspace=ws)
        temp = Rebin(InputWorkspace=temp, Params=rebin_param)
        # CheckOverlapping=False: inputs may legitimately share spectrum numbers.
        ConjoinWorkspaces(InputWorkspace1='ws_conjoined', InputWorkspace2=temp, CheckOverlapping=False)
    ws_conjoined = AnalysisDataService.retrieve('ws_conjoined')
    # Match every spectrum to the one covering the largest x-range.
    ref_spec = ws_conjoined.getSpectrum(largest_range_spectrum).getSpectrumNo()
    ws_conjoined, offset, scale, chisq = MatchSpectra(InputWorkspace=ws_conjoined,
                                                      ReferenceSpectrum=ref_spec,
                                                      CalculateScale=scale_bool,
                                                      CalculateOffset=offset_bool)
    # Snap the requested crop limits onto actual histogram bin edges.
    x_min, x_max, bin_width = self.fit_x_lims_to_match_histogram_bins(ws_conjoined, x_min, x_max)
    # Ragged crop (per-spectrum limits), then rebin onto one uniform grid
    # spanning the overall min/max so the spectra can be summed.
    ws_conjoined = CropWorkspaceRagged(InputWorkspace=ws_conjoined, XMin=x_min, XMax=x_max)
    ws_conjoined = Rebin(InputWorkspace=ws_conjoined, Params=[min(x_min), bin_width, max(x_max)])
    # Weighted sum keeps error propagation sensible across overlapping ranges.
    merged_ws = SumSpectra(InputWorkspace=ws_conjoined, WeightedSum=True, MultiplyBySpectra=False, StoreInADS=False)
    DeleteWorkspace(ws_conjoined)
    self.setProperty('OutputWorkspace', merged_ws)
def _compute_caad(fit_workspaces):
    """Compute the CAAD combination of a list of fit workspaces.

    Each workspace is reduced to the relevant spectra and normalised by its
    integral; the spectra are then regrouped so that spectrum ``i`` of every
    workspace ends up in one workspace, which is finally summed.

    :param fit_workspaces: workspaces to combine; all are assumed to share
        the spectrum layout of the first one (indices are taken from it).
    :return: tuple of (per-index appended workspaces, per-index summed
        workspaces, the spectrum indices used).
    """
    indices = _get_caad_indices(fit_workspaces[0])
    # Keep only the spectra of interest and normalise each workspace by its integral.
    normalised_workspaces = [
        _normalise_by_integral(_extract_spectra(workspace, indices))
        for workspace in fit_workspaces
    ]

    number_of_spectra = normalised_workspaces[0].getNumberHistograms()
    # Regroup: inner list i holds spectrum i extracted from every workspace.
    # EnableLogging=False for this hidden temporary, consistent with the
    # other internal algorithm calls in this helper (was inconsistently True).
    normalised_workspaces = [[
        ExtractSingleSpectrum(InputWorkspace=workspace, OutputWorkspace="__extracted",
                              WorkspaceIndex=index, StoreInADS=False,
                              EnableLogging=False)
        for workspace in normalised_workspaces
    ] for index in range(number_of_spectra)]
    # Append each group into a single multi-spectrum workspace per index.
    normalised_workspaces = [
        _append_all(workspaces) for workspaces in normalised_workspaces
    ]

    # Sum the appended spectra to produce one CAAD spectrum per index.
    summed_workspaces = [
        SumSpectra(InputWorkspace=workspace, OutputWorkspace="__summed",
                   StoreInADS=False, EnableLogging=False)
        for workspace in normalised_workspaces
    ]
    return normalised_workspaces, summed_workspaces, indices
def run_algorithm(input_workspace, range, integral, output_workspace, x_dim, y_dim):
    """Integrate a workspace-index range of *input_workspace*.

    For Horizontal/Vertical integrals the workspace is first mapped into
    real space with the appropriate axis orientation; a Time integral sums
    the raw spectra directly. The (possibly unchanged) output workspace is
    returned.
    """
    hv_min, hv_max = range[0], range[1]

    # Time integration needs no real-space conversion.
    if integral == IntegralEnum.Time:
        return SumSpectra(InputWorkspace=input_workspace,
                          OutputWorkspace=output_workspace,
                          StartWorkspaceIndex=hv_min,
                          EndWorkspaceIndex=hv_max)

    # (vertical axis, horizontal axis) per integral direction.
    axis_pairs = {
        IntegralEnum.Horizontal: ('x', 'y'),
        IntegralEnum.Vertical: ('y', 'x'),
    }
    if integral in axis_pairs:
        vertical_axis, horizontal_axis = axis_pairs[integral]
        output_workspace = ConvertAxesToRealSpace(InputWorkspace=input_workspace,
                                                  OutputWorkspace=output_workspace,
                                                  VerticalAxis=vertical_axis,
                                                  HorizontalAxis=horizontal_axis,
                                                  NumberVerticalBins=int(x_dim),
                                                  NumberHorizontalBins=int(y_dim))
        output_workspace = SumSpectra(InputWorkspace=output_workspace,
                                      OutputWorkspace=output_workspace,
                                      StartWorkspaceIndex=hv_min,
                                      EndWorkspaceIndex=hv_max)
    return output_workspace
def extract_cuts_matrix(workspace: MatrixWorkspace, xmin: float, xmax: float,
                        ymin: float, ymax: float, xcut_name: str, ycut_name: str,
                        log_algorithm_calls: bool = True):
    """
    Extract 1D cuts from the rectangular region of a MatrixWorkspace that has
    a vertical NumericAxis. Either cut may be skipped by passing an empty name.
    :param workspace: A MatrixWorkspace with a vertical NumericAxis
    :param xmin: X min for bounded region
    :param xmax: X max for bounded region
    :param ymin: Y min for bounded region
    :param ymax: Y max for bounded region
    :param xcut_name: Name of the X cut. Empty indicates it should be skipped
    :param ycut_name: Name of the Y cut. Empty indicates it should be skipped
    :param log_algorithm_calls: Log the algorithm call or be silent
    """
    tmp_crop_region = '__tmp_sv_region_extract'
    region = extract_roi_matrix(workspace, xmin, xmax, ymin, ymax, False,
                                tmp_crop_region, log_algorithm_calls)

    # The Y cut is produced first so the X cut below is free to replace the
    # temporary region workspace when it has to rebin.
    if ycut_name:
        # Collapse the whole X range into one bin, then transpose so the
        # cut runs along the vertical axis.
        Rebin(InputWorkspace=region, OutputWorkspace=ycut_name,
              Params=[xmin, xmax - xmin, xmax],
              EnableLogging=log_algorithm_calls)
        Transpose(InputWorkspace=ycut_name, OutputWorkspace=ycut_name,
                  EnableLogging=log_algorithm_calls)

    if xcut_name:
        if not region.isCommonBins():
            # Rebin onto a common grid, using the coarsest spectrum
            # resolution so nothing is overbinned.
            region = _rebin_to_common_grid(region, xmin, xmax, log_algorithm_calls)
        SumSpectra(InputWorkspace=region, OutputWorkspace=xcut_name,
                   EnableLogging=log_algorithm_calls)

    # The temporary region workspace may already be gone; that is fine.
    try:
        DeleteWorkspace(tmp_crop_region, EnableLogging=log_algorithm_calls)
    except ValueError:
        pass
def _convert_to_q(self, w):
    """
    Convert to momentum transfer with the desired binning

    Parameters
    ----------
    w: Mantid.MatrixWorkspace2D

    Returns
    -------
    Mantid.MatrixWorkspace2D
        Summed Q spectrum, stored under the input workspace's name.
    """
    in_q = ConvertUnits(w, Target='MomentumTransfer', Emode='Elastic')
    rebinned = Rebin(in_q, Params=self._qbins, PreserveEvents=False)
    # Overwrite the original name with the summed result.
    return SumSpectra(rebinned, OutputWorkspace=w.name())
def PyExec(self):
    """Reduce a vanadium run into a solid-angle workspace and an
    integrated incident-flux workspace (both set as output properties).
    """
    # Optional stages are enabled by whether the corresponding property was given.
    _background = bool(self.getProperty("Background").value)
    _load_inst = bool(self.getProperty("LoadInstrument").value)
    _norm_current = bool(self.getProperty("NormaliseByCurrent").value)
    _detcal = bool(self.getProperty("DetCal").value)
    _masking = bool(self.getProperty("MaskFile").value)
    _grouping = bool(self.getProperty("GroupingFile").value)
    _anvred = bool(self.getProperty("SphericalAbsorptionCorrection").value)
    _SA_name = self.getPropertyValue("SolidAngleOutputWorkspace")
    _Flux_name = self.getPropertyValue("FluxOutputWorkspace")

    XMin = self.getProperty("MomentumMin").value
    XMax = self.getProperty("MomentumMax").value
    # Single bin spanning [XMin, XMax] (bin width == XMax).
    rebin_param = ','.join([str(XMin), str(XMax), str(XMax)])

    Load(Filename=self.getPropertyValue("Filename"),
         OutputWorkspace='__van',
         FilterByTofMin=self.getProperty("FilterByTofMin").value,
         FilterByTofMax=self.getProperty("FilterByTofMax").value)

    if _norm_current:
        NormaliseByCurrent(InputWorkspace='__van', OutputWorkspace='__van')

    if _background:
        Load(Filename=self.getProperty("Background").value,
             OutputWorkspace='__bkg',
             FilterByTofMin=self.getProperty("FilterByTofMin").value,
             FilterByTofMax=self.getProperty("FilterByTofMax").value)
        if _norm_current:
            NormaliseByCurrent(InputWorkspace='__bkg', OutputWorkspace='__bkg')
        else:
            # Without current normalisation, scale the background to the
            # sample's proton charge before subtraction.
            pc_van = mtd['__van'].run().getProtonCharge()
            pc_bkg = mtd['__bkg'].run().getProtonCharge()
            mtd['__bkg'] *= pc_van / pc_bkg
        mtd['__bkg'] *= self.getProperty('BackgroundScale').value
        Minus(LHSWorkspace='__van', RHSWorkspace='__bkg', OutputWorkspace='__van')
        DeleteWorkspace('__bkg')

    if _load_inst:
        # Replace the embedded instrument definition with the supplied one.
        LoadInstrument(Workspace='__van',
                       Filename=self.getProperty("LoadInstrument").value,
                       RewriteSpectraMap=False)
    if _detcal:
        LoadIsawDetCal(InputWorkspace='__van',
                       Filename=self.getProperty("DetCal").value)

    if _masking:
        LoadMask(Instrument=mtd['__van'].getInstrument().getName(),
                 InputFile=self.getProperty("MaskFile").value,
                 OutputWorkspace='__mask')
        MaskDetectors(Workspace='__van', MaskedWorkspace='__mask')
        DeleteWorkspace('__mask')

    ConvertUnits(InputWorkspace='__van', OutputWorkspace='__van', Target='Momentum')
    Rebin(InputWorkspace='__van', OutputWorkspace='__van', Params=rebin_param)
    CropWorkspace(InputWorkspace='__van', OutputWorkspace='__van', XMin=XMin, XMax=XMax)

    if _anvred:
        # Spherical absorption only (PowerLambda='0' disables the lambda power term).
        AnvredCorrection(InputWorkspace='__van',
                         OutputWorkspace='__van',
                         LinearScatteringCoef=self.getProperty("LinearScatteringCoef").value,
                         LinearAbsorptionCoef=self.getProperty("LinearAbsorptionCoef").value,
                         Radius=self.getProperty("Radius").value,
                         OnlySphericalAbsorption='1',
                         PowerLambda='0')

    # Create solid angle
    Rebin(InputWorkspace='__van',
          OutputWorkspace=_SA_name,
          Params=rebin_param,
          PreserveEvents=False)

    # Create flux
    if _grouping:
        GroupDetectors(InputWorkspace='__van', OutputWorkspace='__van',
                       MapFile=self.getProperty("GroupingFile").value)
    else:
        SumSpectra(InputWorkspace='__van', OutputWorkspace='__van')

    Rebin(InputWorkspace='__van', OutputWorkspace='__van', Params=rebin_param)
    flux = mtd['__van']
    for i in range(flux.getNumberHistograms()):
        el = flux.getSpectrum(i)
        if flux.readY(i)[0] > 0:
            # Divide the event list by the spectrum's (single-bin) total —
            # presumably normalising each spectrum to unit integral; verify.
            el.divide(flux.readY(i)[0], flux.readE(i)[0])
    # Events must be sorted in X before flux integration.
    SortEvents(InputWorkspace='__van', SortBy="X Value")
    IntegrateFlux(InputWorkspace='__van', OutputWorkspace=_Flux_name, NPoints=10000)
    DeleteWorkspace('__van')

    self.setProperty("SolidAngleOutputWorkspace", mtd[_SA_name])
    self.setProperty("FluxOutputWorkspace", mtd[_Flux_name])
def int3samples(runs, name, masks, binning='0.5, 0.05, 8.0'):
    """
    Finds the polarisation versus wavelength for a set of detector tubes.

    Parameters
    ----------
    runs: list of RunData objects
      The runs whose polarisation we are interested in.

    name: string
      The name of this set of runs

    masks: list of string
      The file names of the masks for the sequential tubes that are being used
      for the SEMSANS measurements.

    binning: string
      The binning values to use for the wavelength bins. The default value is
      '0.5, 0.05, 8.0'
    """
    # Clear any leftover accumulation workspaces from a previous invocation.
    for tube, _ in enumerate(masks):
        for i in [1, 2]:
            final_state = "{}_{}_{}".format(name, tube, i)
            if final_state in mtd.getObjectNames():
                DeleteWorkspace(final_state)

    for rnum in runs:
        # Load the run with its monitors and normalise to the monitor spectrum
        # in wavelength. NOTE(review): 'w1'/'w1_monitors' are the ADS names
        # produced by the assignment above — relies on simple-API naming.
        w1 = Load(BASE.format(rnum.number), LoadMonitors=True)
        w1mon = ExtractSingleSpectrum('w1_monitors', 0)
        w1 = ConvertUnits('w1', 'Wavelength', AlignBins=1)
        w1mon = ConvertUnits(w1mon, 'Wavelength')
        w1 = Rebin(w1, binning, PreserveEvents=False)
        w1mon = Rebin(w1mon, binning)
        w1 = w1 / w1mon
        for tube, mask in enumerate(masks):
            # Mask everything except this tube, then sum its spectra.
            Mask_Tube = LoadMask('LARMOR', mask)
            w1temp = CloneWorkspace(w1)
            MaskDetectors(w1temp, MaskedWorkspace="Mask_Tube")
            Tube_Sum = SumSpectra(w1temp)
            # Accumulate per (tube, period): 'Tube_Sum_1'/'Tube_Sum_2' are the
            # per-period members of the summed group — presumably the two spin
            # states; verify against the instrument setup.
            for i in [1, 2]:
                final_state = "{}_{}_{}".format(name, tube, i)
                if final_state in mtd.getObjectNames():
                    mtd[final_state] += mtd["Tube_Sum_{}".format(i)]
                else:
                    mtd[final_state] = mtd["Tube_Sum_{}".format(i)]

    # Bin centres of the accumulated wavelength axis.
    x = mtd["{}_0_1".format(name)].extractX()[0]
    dx = (x[1:] + x[:-1]) / 2
    pols = []

    # Sample the 3He analyser polarisation over each run's time window.
    for run in runs:
        he_stat = he3_stats(run)
        start = (run.start - he_stat.dt).seconds / 3600 / he_stat.t1
        end = (run.end - he_stat.dt).seconds / 3600 / he_stat.t1
        for time in np.linspace(start, end, 10):
            temp = he3pol(he_stat.scale, time)(dx)
            pols.append(temp)
    # Time-averaged helium polarisation as a workspace over wavelength.
    wpol = CreateWorkspace(
        x,
        np.mean(pols, axis=0),
        UnitX="Wavelength",
        YUnitLabel="Counts")

    for tube, _ in enumerate(masks):
        up = mtd["{}_{}_2".format(name, tube)]
        dn = mtd["{}_{}_1".format(name, tube)]
        # Beam polarisation = asymmetry corrected by the helium polarisation.
        pol = (up - dn) / (up + dn)
        pol /= wpol
        DeleteWorkspaces(
            ["{}_{}_{}".format(name, tube, i) for i in range(1, 3)])
        RenameWorkspace("pol", OutputWorkspace="{}_{}".format(name, tube))

    DeleteWorkspaces(["Tube_Sum_1", "Tube_Sum_2"])
    # NOTE(review): the inner `for i in range(1, 3)` makes each tube name
    # appear twice in this list — looks redundant; confirm intent.
    GroupWorkspaces([
        "{}_{}".format(name, tube) for tube, _ in enumerate(masks)
        for i in range(1, 3)
    ],
                    OutputWorkspace=str(name))
def PyExec(self):
    """Resample, calibrate and background-subtract the input workspaces,
    conjoin them and sum the result into the output workspace.
    """
    data = self.getProperty("InputWorkspace").value          # [1~n]
    bkg = self.getProperty("BackgroundWorkspace").value      # [1~n]
    cal = self.getProperty("CalibrationWorkspace").value     # [1]
    xMin = self.getProperty("XMin").value
    xMax = self.getProperty("XMax").value
    numberBins = self.getProperty("NumberBins").value
    outWS = self.getPropertyValue("OutputWorkspace")

    # NOTE:
    # StringArrayProperty cannot be optional, so the background can only be passed in as a string
    # or a list, which will be manually unpacked here
    if bkg != "":
        bkg = [
            AnalysisDataService.retrieve(me)
            for me in map(str.strip, bkg.split(","))
        ]

    # NOTE:
    # xMin and xMax are initialized as empty numpy.array (np.array([])),
    # so size == 0 means "not provided": fall back to the global limits.
    _xMin, _xMax = self._locate_global_xlimit()
    xMin = _xMin if xMin.size == 0 else xMin
    xMax = _xMax if xMax.size == 0 else xMax

    # BEGIN_FOR: process each input spectrum
    for n, _wsn in enumerate(data):
        _mskn = f"__mask_{n}"  # calculated in previous loop
        _ws = AnalysisDataService.retrieve(_wsn)

        # resample spectra
        # NOTE(review): resamples the temporary f"__ws_{n}" (presumably
        # produced by _locate_global_xlimit) rather than _wsn — confirm
        # that is intentional.
        _ws_resampled = ResampleX(
            InputWorkspace=f"__ws_{n}",
            XMin=xMin,
            XMax=xMax,
            NumberBins=numberBins,
            EnableLogging=False,
        )

        # calibration
        if cal is not None:
            _ws_cal_resampled = self._resample_calibration(_ws, _mskn, xMin, xMax)
            _ws_resampled = Divide(
                LHSWorkspace=_ws_resampled,
                RHSWorkspace=_ws_cal_resampled,
                EnableLogging=False,
            )
        else:
            _ws_cal_resampled = None

        _ws_resampled = Scale(
            InputWorkspace=_ws_resampled,
            Factor=self._get_scale(cal) / self._get_scale(_ws),
            EnableLogging=False,
        )

        # background; bkg is either still "" (none given) or the list built above
        if bkg != "":
            bgn = bkg[n] if isinstance(bkg, list) else bkg
            _ws_bkg_resampled = self._resample_background(
                bgn, _ws, _mskn, xMin, xMax, _ws_cal_resampled
            )
            _ws_resampled = Minus(
                LHSWorkspace=_ws_resampled,
                RHSWorkspace=_ws_bkg_resampled,
                EnableLogging=False,
            )

        # conjoin: first iteration seeds the accumulator, later ones append to it
        if n < 1:
            CloneWorkspace(
                InputWorkspace=_ws_resampled,
                OutputWorkspace="__ws_conjoined",
                EnableLogging=False,
            )
        else:
            ConjoinWorkspaces(
                InputWorkspace1="__ws_conjoined",
                InputWorkspace2=_ws_resampled,
                CheckOverlapping=False,
                EnableLogging=False,
            )
    # END_FOR: process each input spectrum

    # Step_3: sum all spectra
    # ref: https://docs.mantidproject.org/nightly/algorithms/SumSpectra-v1.html
    # MultiplyBySpectra is disabled when a calibration was applied.
    if cal is not None:
        SumSpectra(
            InputWorkspace="__ws_conjoined",
            OutputWorkspace=outWS,
            WeightedSum=True,
            MultiplyBySpectra=False,
            EnableLogging=False,
        )
    else:
        SumSpectra(
            InputWorkspace="__ws_conjoined",
            OutputWorkspace=outWS,
            WeightedSum=True,
            MultiplyBySpectra=True,
            EnableLogging=False,
        )
    self.setProperty("OutputWorkspace", outWS)

    # Step_4: remove temp workspaces
    [
        DeleteWorkspace(ws, EnableLogging=False)
        for ws in self.temp_workspace_list
        if mtd.doesExist(ws)
    ]
def PyExec(self):
    """Convert, resample, calibrate and background-subtract the inputs,
    then either sum them into one spectrum or group them, per the Sum
    property.
    """
    data = self._expand_groups()
    bkg = self.getProperty(
        "BackgroundWorkspace").valueAsStr  # same background for all
    cal = self.getProperty(
        "CalibrationWorkspace").value  # same calibration for all
    numberBins = self.getProperty("NumberBins").value
    outWS = self.getPropertyValue("OutputWorkspace")
    summing = self.getProperty("Sum").value  # [Yes or No]

    # convert all of the input workspaces into spectrum of "target" units (generally angle)
    data, masks = self._convert_data(data)

    # determine x-range
    xMin, xMax = self._locate_global_xlimit(data)

    # BEGIN_FOR: process each input spectrum
    for n, (_wsn, _mskn) in enumerate(zip(data, masks)):
        # resample spectra onto the common x-range
        ResampleX(
            InputWorkspace=_wsn,
            OutputWorkspace=_wsn,
            XMin=xMin,
            XMax=xMax,
            NumberBins=numberBins,
            EnableLogging=False,
        )

        # calibration
        if cal is not None:
            _ws_cal_resampled = self._resample_calibration(
                _wsn, _mskn, xMin, xMax)
            Divide(
                LHSWorkspace=_wsn,
                RHSWorkspace=_ws_cal_resampled,
                OutputWorkspace=_wsn,
                EnableLogging=False,
            )
        else:
            _ws_cal_resampled = None

        Scale(
            InputWorkspace=_wsn,
            OutputWorkspace=_wsn,
            Factor=self._get_scale(cal) / self._get_scale(_wsn),
            EnableLogging=False,
        )

        # background (the resampled calibration is reused for the background)
        if bkg:
            _ws_bkg_resampled = self._resample_background(
                bkg, _wsn, _mskn, xMin, xMax, _ws_cal_resampled)
            Minus(
                LHSWorkspace=_wsn,
                RHSWorkspace=_ws_bkg_resampled,
                OutputWorkspace=_wsn,
                EnableLogging=False,
            )

        if summing:
            # conjoin: first workspace becomes the accumulator, later ones append
            if n < 1:
                RenameWorkspace(
                    InputWorkspace=_wsn,
                    OutputWorkspace="__ws_conjoined",
                    EnableLogging=False,
                )
            else:
                # this adds to `InputWorkspace1`
                ConjoinWorkspaces(
                    InputWorkspace1="__ws_conjoined",
                    InputWorkspace2=_wsn,
                    CheckOverlapping=False,
                    EnableLogging=False,
                )
    # END_FOR: process each input spectrum

    # Step_3: sum all spectra
    # ref: https://docs.mantidproject.org/nightly/algorithms/SumSpectra-v1.html
    if summing:
        if cal is not None:
            # NOTE(review): inside this branch cal is not None, so
            # `not bool(cal)` is presumably always False here — i.e. the
            # same as the sibling algorithm's explicit False; confirm.
            outWS = SumSpectra(
                InputWorkspace="__ws_conjoined",
                OutputWorkspace=outWS,
                WeightedSum=True,
                MultiplyBySpectra=not bool(cal),
                EnableLogging=False,
            )
        else:
            outWS = SumSpectra(
                InputWorkspace="__ws_conjoined",
                OutputWorkspace=outWS,
                WeightedSum=True,
                MultiplyBySpectra=True,
                EnableLogging=False,
            )
    else:
        # Not summing: a single input is renamed, several are grouped.
        if len(data) == 1:
            outWS = RenameWorkspace(InputWorkspace=data[0],
                                    OutputWorkspace=outWS)
        else:
            outWS = GroupWorkspaces(InputWorkspaces=data,
                                    OutputWorkspace=outWS)

    self.setProperty("OutputWorkspace", outWS)

    # Step_4: remove temp workspaces
    [
        DeleteWorkspace(ws, EnableLogging=False)
        for ws in self.temp_workspace_list
        if mtd.doesExist(ws)
    ]