def _vana_correct(self):
    """
    creates the corrected workspace
    """
    wslist = []
    # 1. normalize Vanadium and Background
    vana_normws = api.mtd[self.vanaws.getName() + '_NORM']
    bkg_normws = api.mtd[self.bkgws.getName() + '_NORM']
    _vana_norm_ = api.Divide(self.vanaws, vana_normws)
    wslist.append(_vana_norm_.getName())
    _bkg_norm_ = api.Divide(self.bkgws, bkg_normws)
    wslist.append(_bkg_norm_.getName())
    # 2. subtract background from Vanadium
    _vana_bg_ = _vana_norm_ - _bkg_norm_
    wslist.append(_vana_bg_.getName())
    # check for negative values, throw exception
    arr = np.array(_vana_bg_.extractY()).flatten()
    neg_values = np.where(arr < 0)[0]
    if len(neg_values):
        mlzutils.cleanup(wslist)
        message = "Background " + self.bkgws.getName() + " is higher than Vanadium " + \
            self.vanaws.getName() + " signal!"
        self.log().error(message)
        raise RuntimeError(message)
    # 3. calculate correction coefficients
    _vana_mean_ws_ = self._vana_mean(_vana_bg_)
    if not _vana_mean_ws_:
        mlzutils.cleanup(wslist)
        return None
    if not self.vana_mean_name:
        wslist.append(_vana_mean_ws_.getName())
    _coef_ws_ = api.Divide(LHSWorkspace=_vana_bg_, RHSWorkspace=_vana_mean_ws_,
                           WarnOnZeroDivide=True)
    wslist.append(_coef_ws_.getName())
    # 4. correct the raw data (not normalized!)
    api.Divide(LHSWorkspace=self.dataws, RHSWorkspace=_coef_ws_, WarnOnZeroDivide=True,
               OutputWorkspace=self.outws_name)
    outws = api.mtd[self.outws_name]
    # cleanup
    mlzutils.cleanup(wslist)
    return outws
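
# A minimal NumPy sketch of the arithmetic performed above with Mantid workspaces,
# assuming plain per-detector count arrays; the function and argument names are
# hypothetical and exist only for illustration, and the mean over detectors stands
# in for whatever self._vana_mean computes:
#   coef[i]      = (vana[i]/vana_norm[i] - bkg[i]/bkg_norm[i]) / vana_mean
#   data_corr[i] = data[i] / coef[i]
def vana_correction_sketch(data, vana, vana_norm, bkg, bkg_norm):
    # normalize Vanadium and Background, then subtract the background
    vana_bg = vana / vana_norm - bkg / bkg_norm
    if (vana_bg < 0).any():
        raise RuntimeError("Background is higher than Vanadium signal!")
    # per-detector correction coefficients relative to the mean Vanadium signal
    coef = vana_bg / vana_bg.mean()
    # correct the raw (not normalized) data
    return data / coef
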
def _fr_correction(self):
    """
    applies flipping ratio correction
    creates the corrected workspaces
    """
    wslist = []
    # 1. normalize NiCr and Background
    sf_nicr_normws = api.AnalysisDataService.retrieve(self.input_workspaces['SF_NiCr'] + '_NORM')
    sf_nicr = api.AnalysisDataService.retrieve(self.input_workspaces['SF_NiCr'])
    _sf_nicr_norm_ = api.Divide(sf_nicr, sf_nicr_normws)
    wslist.append(_sf_nicr_norm_.getName())
    nsf_nicr_normws = api.AnalysisDataService.retrieve(self.input_workspaces['NSF_NiCr'] + '_NORM')
    nsf_nicr = api.AnalysisDataService.retrieve(self.input_workspaces['NSF_NiCr'])
    _nsf_nicr_norm_ = api.Divide(nsf_nicr, nsf_nicr_normws)
    wslist.append(_nsf_nicr_norm_.getName())
    sf_bkgr_normws = api.AnalysisDataService.retrieve(self.input_workspaces['SF_Background'] + '_NORM')
    sf_bkgr = api.AnalysisDataService.retrieve(self.input_workspaces['SF_Background'])
    _sf_bkgr_norm_ = api.Divide(sf_bkgr, sf_bkgr_normws)
    wslist.append(_sf_bkgr_norm_.getName())
    nsf_bkgr_normws = api.AnalysisDataService.retrieve(self.input_workspaces['NSF_Background'] + '_NORM')
    nsf_bkgr = api.AnalysisDataService.retrieve(self.input_workspaces['NSF_Background'])
    _nsf_bkgr_norm_ = api.Divide(nsf_bkgr, nsf_bkgr_normws)
    wslist.append(_nsf_bkgr_norm_.getName())
    # 2. subtract background from NiCr
    _sf_nicr_bg_ = _sf_nicr_norm_ - _sf_bkgr_norm_
    wslist.append(_sf_nicr_bg_.getName())
    _nsf_nicr_bg_ = _nsf_nicr_norm_ - _nsf_bkgr_norm_
    wslist.append(_nsf_nicr_bg_.getName())
    # check for negative values, throw exception
    sf_arr = np.array(_sf_nicr_bg_.extractY()).flatten()
    nsf_arr = np.array(_nsf_nicr_bg_.extractY()).flatten()
    sf_neg_values = np.where(sf_arr < 0)[0]
    nsf_neg_values = np.where(nsf_arr < 0)[0]
    if len(sf_neg_values) or len(nsf_neg_values):
        mlzutils.cleanup(wslist)
        message = "Background is higher than NiCr signal!"
        self.log().error(message)
        raise RuntimeError(message)
    # 3. calculate correction coefficients
    _coef_ws_ = api.Divide(LHSWorkspace=_nsf_nicr_bg_, RHSWorkspace=_sf_nicr_bg_,
                           WarnOnZeroDivide=True)
    wslist.append(_coef_ws_.getName())
    # 4. apply correction to the raw data (not normalized!)
    sf_data_ws = api.AnalysisDataService.retrieve(self.input_workspaces['SF_Data'])
    nsf_data_ws = api.AnalysisDataService.retrieve(self.input_workspaces['NSF_Data'])
    # NSF_corr[i] = NSF[i] - SF[i]/c[i]
    _tmp_ws_ = api.Divide(LHSWorkspace=sf_data_ws, RHSWorkspace=_coef_ws_, WarnOnZeroDivide=True)
    _tmp_ws_.setYUnit(nsf_data_ws.YUnit())
    api.Minus(LHSWorkspace=nsf_data_ws, RHSWorkspace=_tmp_ws_, OutputWorkspace=self.nsf_outws_name)
    nsf_outws = api.AnalysisDataService.retrieve(self.nsf_outws_name)
    api.DeleteWorkspace(_tmp_ws_)
    # SF_corr[i] = SF[i] - NSF[i]/c[i]
    _tmp_ws_ = api.Divide(LHSWorkspace=nsf_data_ws, RHSWorkspace=_coef_ws_, WarnOnZeroDivide=True)
    _tmp_ws_.setYUnit(sf_data_ws.YUnit())
    api.Minus(LHSWorkspace=sf_data_ws, RHSWorkspace=_tmp_ws_, OutputWorkspace=self.sf_outws_name)
    sf_outws = api.AnalysisDataService.retrieve(self.sf_outws_name)
    api.DeleteWorkspace(_tmp_ws_)
    # 5. apply correction for double spin-flip scattering
    if self.dfr > 1e-7:
        _tmp_ws_ = sf_outws * self.dfr
        _tmp_ws_.setYUnit(nsf_outws.YUnit())
        wslist.append(_tmp_ws_.getName())
        # NSF_corr[i] = NSF_prev_corr[i] - SF_prev_corr[i]*dfr, SF_corr = SF_prev_corr
        api.Minus(LHSWorkspace=nsf_outws, RHSWorkspace=_tmp_ws_,
                  OutputWorkspace=self.nsf_outws_name)
    # cleanup
    mlzutils.cleanup(wslist)
    return
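
# A minimal NumPy sketch of the flipping-ratio arithmetic performed above with
# Mantid workspaces; the plain-array signature and names are hypothetical, and
# normalization/background handling is reduced to already background-corrected
# NiCr arrays. It follows the formulas quoted in the comments:
#   c[i]        = (NSF_NiCr[i] - NSF_bkg[i]) / (SF_NiCr[i] - SF_bkg[i])
#   NSF_corr[i] = NSF[i] - SF[i]/c[i]
#   SF_corr[i]  = SF[i] - NSF[i]/c[i]
#   NSF_corr[i] = NSF_corr[i] - SF_corr[i]*dfr   (double spin-flip, if dfr > 0)
def fr_correction_sketch(sf_data, nsf_data, sf_nicr_bg, nsf_nicr_bg, dfr=0.0):
    # per-detector flipping-ratio coefficients from the NiCr reference
    coef = nsf_nicr_bg / sf_nicr_bg
    # leak the spin-flip contamination out of the non-spin-flip channel and vice versa
    nsf_corr = nsf_data - sf_data / coef
    sf_corr = sf_data - nsf_data / coef
    # optional correction for double spin-flip scattering
    if dfr > 1e-7:
        nsf_corr = nsf_corr - sf_corr * dfr
    return sf_corr, nsf_corr
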