def test_importAll_creates_variable_in_current_global_dict_pointing_to_each_workspace(self):
    """importAll() should expose every ADS workspace as a variable of the
    same name in the calling frame's namespace."""
    # Start from whatever is already in the ADS, then add three known names.
    expected = mtd.getObjectNames()
    created = ["ADSTest_test_1", "ADSTest_test_2", "ADSTest_test_3"]
    for ws in created:
        self._run_createws(ws)
    expected += created

    # None of the workspace names should be visible before the import.
    for ws in expected:
        self.assertFalse(ws in locals())

    # Pull every workspace into this frame's namespace.
    mtd.importAll()

    # Each workspace name should now resolve here.
    for ws in expected:
        self.assertTrue(ws in locals())

    # Clean up: drop the injected names, then remove the extra workspaces.
    for ws in expected:
        try:
            del locals()[ws]
        except KeyError:
            pass
    for ws in created:
        mtd.remove(ws)
def QuasiPlot(ws_stem, plot_type, res_plot, sequential):
    """Plot the results of a Quasi (quasi-elastic Bayes) fit.

    ws_stem    -- stem of the workspace names produced by the fit.
    plot_type  -- 'Prob', 'Fit', 'All', or a falsy value for no plotting.
    res_plot   -- spectrum indices to plot from the fitted workspace.
    sequential -- True when a sequential fit produced a '_Result' workspace.
    """
    if not plot_type:
        # Plotting was not requested.
        return

    if sequential:
        result_ws = ws_stem + '_Result'
        if plot_type in ('Prob', 'All'):
            probability_ws = ws_stem + '_Prob'
            # The probability workspace is not produced by every run.
            if probability_ws in mtd.getObjectNames():
                MTD_PLOT.plotSpectrum(probability_ws, [1, 2], False)
        QuasiPlotParameters(result_ws, plot_type)

    if plot_type in ('Fit', 'All'):
        fit_ws = ws_stem + '_Workspace_0'
        MTD_PLOT.plotSpectrum(fit_ws, res_plot, False)
def QuasiPlot(ws_stem, plot_type, res_plot, sequential):
    """Plot Quasi fit output workspaces according to the requested plot type.

    A falsy plot_type disables plotting entirely; otherwise 'Prob', 'Fit'
    or 'All' select which of the fit's output workspaces are shown.
    """
    if not plot_type:
        return

    want_prob = plot_type == 'Prob' or plot_type == 'All'
    want_fit = plot_type == 'Fit' or plot_type == 'All'

    if sequential:
        ws_name = ws_stem + '_Result'
        if want_prob:
            prob_ws = ws_stem + '_Prob'
            # Only plot the probability workspace when it actually exists.
            if prob_ws in mtd.getObjectNames():
                MTD_PLOT.plotSpectrum(prob_ws, [1, 2], False)
        QuasiPlotParameters(ws_name, plot_type)

    if want_fit:
        fWS = ws_stem + '_Workspace_0'
        MTD_PLOT.plotSpectrum(fWS, res_plot, False)
def QuasiPlot(ws_stem, plot_type, res_plot, sequential):
    """Plot the results of a Quasi (quasi-elastic Bayes) fit.

    ws_stem    -- stem of the workspace names produced by the fit.
    plot_type  -- 'Prob', 'Fit', 'All', or a falsy value for no plotting.
    res_plot   -- spectrum indices to plot from the fitted workspace.
    sequential -- True when a sequential fit produced a '_Result' workspace.
    """
    if not plot_type:
        # Plotting was not requested.
        return

    if sequential:
        result_ws = ws_stem + '_Result'
        # NOTE(review): num_spectra is computed but never used below; kept
        # because the ADS lookup would raise if result_ws were missing.
        num_spectra = mtd[result_ws].getNumberHistograms()
        if plot_type in ('Prob', 'All'):
            prob_ws = ws_stem + '_Prob'
            # The probability workspace is not produced by every run.
            if prob_ws in mtd.getObjectNames():
                mp.plotSpectrum(prob_ws, [1, 2], False)
        QuasiPlotParameters(result_ws, plot_type)

    if plot_type in ('Fit', 'All'):
        fit_ws = ws_stem + '_Workspace_0'
        fit_plot = mp.plotSpectrum(fit_ws, res_plot, False)
def QuasiPlot(ws_stem, plot_type, res_plot, sequential):
    """Plot Quasi fit output workspaces according to the requested plot type.

    A falsy plot_type disables plotting entirely; otherwise 'Prob', 'Fit'
    or 'All' select which of the fit's output workspaces are shown.
    """
    if not plot_type:
        return

    want_prob = plot_type == 'Prob' or plot_type == 'All'
    want_fit = plot_type == 'Fit' or plot_type == 'All'

    if sequential:
        ws_name = ws_stem + '_Result'
        # NOTE(review): num_spectra is unused; retained so the ADS lookup
        # (and any error it raises) still happens exactly as before.
        num_spectra = mtd[ws_name].getNumberHistograms()
        if want_prob:
            prob_ws = ws_stem + '_Prob'
            # Only plot the probability workspace when it actually exists.
            if prob_ws in mtd.getObjectNames():
                mp.plotSpectrum(prob_ws, [1, 2], False)
        QuasiPlotParameters(ws_name, plot_type)

    if want_fit:
        fWS = ws_stem + '_Workspace_0'
        f_plot = mp.plotSpectrum(fWS, res_plot, False)
# NOTE(review): this method body arrived collapsed — each physical line below
# holds many statements and several lines break mid-statement (e.g. one ends
# with "thi_units =" and continues on the next). The code is kept
# byte-identical; only this header comment block was added.
#
# RefLReduction.PyExec — legacy REF_L reflectometer reduction:
#   * finds and loads one or more REF_L event NeXus runs (summed with Plus
#     when several run numbers are given),
#   * rebins and crops the data in TOF, then normalises by proton charge,
#   * optionally subtracts the signal background; backSubMethod selects
#     between a RefRoi-based subtraction (1) and a per-TOF weighted average
#     over the pixels on either side of the peak (2),
#   * when NormFlag is set, reduces a direct-beam run the same way and
#     divides the data by it,
#   * applies a scaling-factor file when one exists, converts to Q, strips
#     zero-count and edge points, and publishes OutputWorkspace.
#
# NOTE(review): cleanup candidates spotted while reading (left untouched):
#   * `from mantidsimple import mtd` re-binds the `mtd` imported from
#     `mantid` a few statements earlier,
#   * the bare `except: pass` around the zero-stripping step can hide
#     genuine failures,
#   * `mtd.deleteWorkspace(ws_histo_data)` is issued twice in a row in the
#     no-background-subtraction branch.
def PyExec(self): import os import numpy import math from reduction.instruments.reflectometer import wks_utility from mantid import mtd #remove all previous workspaces list_mt = mtd.getObjectNames() for _mt in list_mt: if _mt.find('_scaled') != -1: mtd.remove(_mt) if _mt.find('_reflectivity') != -1: mtd.remove(_mt) from mantidsimple import mtd bDebug = True if bDebug: print '====== Running in mode DEBUGGING =======' run_numbers = self.getProperty("RunNumbers") backSubMethod = 2 #1 uses RefRoi, 2 used own method mtd.sendLogMessage("RefLReduction: processing %s" % run_numbers) #run with normalization or not NormFlag = self.getProperty("NormFlag") normalization_run = self.getProperty("NormalizationRunNumber") data_peak = self.getProperty("SignalPeakPixelRange") data_back = self.getProperty("SignalBackgroundPixelRange") # TOF range to consider TOFrangeFlag = self.getProperty("TofRangeFlag") if (TOFrangeFlag): TOFrange = self.getProperty("TOFRange") #microS else: TOFrange = [0, 200000] # Steps for TOF rebin TOFsteps = 100.0 #use now a global q binning (user does not have control over it) #q_min = 0.005 #q_step = -0.01 # Q binning for output distribution q_min = self.getProperty("QMin") q_step = self.getProperty("QStep") if (q_step > 0): q_step = -q_step #dimension of the detector (256 by 304 pixels) maxX = 304 maxY = 256 #Due to the frame effect, it's sometimes necessary to narrow the range #over which we add all the pixels along the low resolution #Parameter data_low_res_flag = self.getProperty("LowResDataAxisPixelRangeFlag") if data_low_res_flag: data_low_res = self.getProperty("LowResDataAxisPixelRange") else: data_low_res = [0,maxX-1] norm_low_res_flag = self.getProperty("LowResNormAxisPixelRangeFlag") if norm_low_res_flag: norm_low_res = self.getProperty("LowResNormAxisPixelRange") else: norm_low_res = [0,maxX-1] h = 6.626e-34 #m^2 kg s^-1 m = 1.675e-27 #kg norm_back = self.getProperty("NormBackgroundPixelRange") norm_peak = self.getProperty("NormPeakPixelRange") 
subtract_data_bck = self.getProperty("SubtractSignalBackground") subtract_norm_bck = self.getProperty("SubtractNormBackground") #name of the sfCalculator txt file # slitsValuePrecision = 0.01 #precision of slits = 10% slitsValuePrecision = sfCalculator.PRECISION sfFile = self.getProperty("ScalingFactorFile") incidentMedium = self.getProperty("IncidentMediumSelected") slitsWidthFlag = self.getProperty("SlitsWidthFlag") # Pick a good workspace name ws_name = "refl%d" % run_numbers[0] ws_event_data = ws_name+"_evt" # Load the data into its workspace allow_multiple = True if len(run_numbers)>1 and allow_multiple: _list = [] for _run in run_numbers: _list.append(str(_run)) list_run = ','.join(_list) print '** Working with data runs: ' + str(list_run) for _run in run_numbers: ############################################################## # Find full path to event NeXus data file try: data_file = FileFinder.findRuns("REF_L%d" %_run)[0] except RuntimeError: msg = "RefLReduction: could not find run %d\n" % _run msg += "Add your data folder to your User Data Directories in the File menu" raise RuntimeError(msg) if not mtd.workspaceExists(ws_event_data): LoadEventNexus(Filename=data_file, OutputWorkspace=ws_event_data) else: LoadEventNexus(Filename=data_file, OutputWorkspace='tmp') mt1 = mtd[ws_event_data] mt2 = mtd['tmp'] Plus(LHSWorkspace=ws_event_data, RHSWorkspace='tmp', OutputWorkspace=ws_event_data) else: print '** Working with data run: ' + str(run_numbers[0]) try: data_file = FileFinder.findRuns("REF_L%d" %run_numbers[0])[0] except RuntimeError: msg = "RefLReduction: could not find run %d\n" %run_numbers[0] msg += "Add your data folder to your User Data Directories in the File menu" raise RuntimeError(msg) if not mtd.workspaceExists(ws_event_data): LoadEventNexus(Filename=data_file, OutputWorkspace=ws_event_data) # Get metadata mt_run = mtd[ws_event_data].getRun() ##get angles value thi_value = mt_run.getProperty('thi').value[0] thi_units = 
mt_run.getProperty('thi').units tthd_value = mt_run.getProperty('tthd').value[0] tthd_units = mt_run.getProperty('tthd').units thi_rad = wks_utility.angleUnitConversion(value=thi_value, from_units=thi_units, to_units='rad') tthd_rad = wks_utility.angleUnitConversion(value=tthd_value, from_units=tthd_units, to_units='rad') # Rebin data (x-axis is in TOF) print '-> Rebin' ws_histo_data = "_"+ws_name+"_histo" Rebin(InputWorkspace=ws_event_data, OutputWorkspace=ws_histo_data, Params=[TOFrange[0], TOFsteps, TOFrange[1]], PreserveEvents=True) # Keep only range of TOF of interest print '-> Crop TOF range' CropWorkspace(ws_histo_data,ws_histo_data,XMin=TOFrange[0], XMax=TOFrange[1]) # Normalized by Current (proton charge) print '-> Normalize by proton charge' NormaliseByCurrent(InputWorkspace=ws_histo_data, OutputWorkspace=ws_histo_data) # Calculation of the central pixel (using weighted average) pixelXtof_data = wks_utility.getPixelXTOF(mtd[ws_histo_data], maxX=maxX, maxY=maxY) pixelXtof_1d = pixelXtof_data.sum(axis=1) # Keep only range of pixels pixelXtof_roi = pixelXtof_1d[data_peak[0]:data_peak[1]] sz = pixelXtof_roi.size _num = 0 _den = 0 start_pixel = data_peak[0] for i in range(sz): _num += (start_pixel * pixelXtof_roi[i]) start_pixel = start_pixel + 1 _den += pixelXtof_roi[i] data_cpix = _num / _den print '-> Central pixel is {0:.1f}'.format(data_cpix) # Retrieve geometry of instrument # Sample-to-detector distance sample = mtd[ws_event_data].getInstrument().getSample() source = mtd[ws_event_data].getInstrument().getSource() dSM = sample.getDistance(source) # Create array of distances pixel->sample dPS_array = numpy.zeros((maxY, maxX)) for x in range(maxX): for y in range(maxY): _index = maxY * x + y detector = mtd[ws_event_data].getDetector(_index) dPS_array[y, x] = sample.getDistance(detector) # Array of distances pixel->source dMP_array = dPS_array + dSM # Distance sample->center of detector dSD = dPS_array[maxY / 2, maxX / 2] # Distance source->center of 
detector dMD = dSD + dSM ws_data = '_' + ws_name + '_DataWks' #Even if user select Background subtraction #make sure there is a background selection (peak != back selection) _LfromPx = data_back[0] _LtoPx = data_peak[0] _RfromPx = data_peak[1] _RtoPx = data_back[1] if ((_LfromPx == _LtoPx) and (_RfromPx == _RtoPx)): subtract_data_bck = False if (subtract_data_bck and (backSubMethod == 1)): print '-> substract background' ConvertToMatrixWorkspace(InputWorkspace=ws_histo_data, OutputWorkspace=ws_histo_data) ws_data_bck = '_' + ws_name + '_DataBckWks' bBackLeft = False if (data_back[0] < (data_peak[0]-1)): bBackLeft = True ws_data_bck_1 = ws_data_bck + "_1" RefRoi(InputWorkspace=ws_histo_data, OutputWorkspace=ws_data_bck_1, NXPixel=maxX, NYPixel=maxY, ConvertToQ=False, IntegrateY=False, SumPixels=True, XPixelMin=data_low_res[0], XPixelMax=data_low_res[1], YPixelMin=data_back[0], YPixelMax=data_peak[0]-1, NormalizeSum=True) ws_data_bck_1_rebin = ws_data_bck_1 + '_rebin' RebinToWorkspace(WorkspaceToRebin=ws_data_bck_1, WorkspaceToMatch=ws_histo_data, OutputWorkspace=ws_data_bck_1_rebin) bBackRight = False if ((data_peak[1]+1) < data_back[1]): bBackRight = True ws_data_bck_2 = ws_data_bck + "_2" RefRoi(InputWorkspace=ws_histo_data, OutputWorkspace=ws_data_bck_2, NXPixel=maxX, NYPixel=maxY, ConvertToQ=False, IntegrateY=False, SumPixels=True, XPixelMin=data_low_res[0], XPixelMax=data_low_res[1], YPixelMin=data_peak[1]+1, YPixelMax=data_back[1], NormalizeSum=True) ws_data_bck_2_rebin = ws_data_bck_2 + '_rebin' RebinToWorkspace(WorkspaceToRebin=ws_data_bck_2, WorkspaceToMatch=ws_histo_data, OutputWorkspace=ws_data_bck_2_rebin) if (bBackLeft and bBackRight): Plus(RHSWorkspace=ws_data_bck_1_rebin, LHSWorkspace=ws_data_bck_2_rebin, OutputWorkspace=ws_data_bck) Scale(InputWorkspace=ws_data_bck, OutputWorkspace=ws_data_bck+'_scale', Factor=0.5, Operation="Multiply") Minus(LHSWorkspace=ws_histo_data, RHSWorkspace=ws_data_bck+'_scale', OutputWorkspace=ws_data) if 
mtd.workspaceExists(ws_data_bck+'_scale'): mtd.deleteWorkspace(ws_data_bck+'_scale') if mtd.workspaceExists(ws_data_bck): mtd.deleteWorkspace(ws_data_bck) if mtd.workspaceExists(ws_data_bck_1_rebin): mtd.deleteWorkspace(ws_data_bck_1_rebin) if mtd.workspaceExists(ws_data_bck_2_rebin): mtd.deleteWorkspace(ws_data_bck_2_rebin) if mtd.workspaceExists(ws_data_bck_1): mtd.deleteWorkspace(ws_data_bck_1) if mtd.workspaceExists(ws_data_bck_2): mtd.deleteWorkspace(ws_data_bck_2) if mtd.workspaceExists(ws_histo_data): mtd.deleteWorkspace(ws_histo_data) elif (bBackLeft): Minus(LHSWorkspace=ws_histo_data, RHSWorkspace=ws_data_bck_1_rebin, OutputWorkspace=ws_data) if mtd.workspaceExists(ws_data_bck_1_rebin): mtd.deleteWorkspace(ws_data_bck_1_rebin) if mtd.workspaceExists(ws_data_bck_1): mtd.deleteWorkspace(ws_data_bck_1) elif (bBackRight): Minus(LHSWorkspace=ws_histo_data, RHSWorkspace=ws_data_bck_2_rebin, OutputWorkspace=ws_data) if mtd.workspaceExists(ws_data_bck_2_rebin): mtd.deleteWorkspace(ws_data_bck_2_rebin) if mtd.workspaceExists(ws_data_bck_2): mtd.deleteWorkspace(ws_data_bck_2) #cleanup (remove all negatives values ResetNegatives(InputWorkspace=ws_data, OutputWorkspace=ws_data, AddMinimum=0) if mtd.workspaceExists(ws_histo_data): mtd.deleteWorkspace(ws_histo_data) if (subtract_data_bck and (backSubMethod == 2)): #integrate over the x axis in the low axis range specified wks_utility.createIntegratedWorkspace(mtd[ws_histo_data], ws_histo_data+'_1D', fromXpixel=data_low_res[0], toXpixel=data_low_res[1], fromYpixel=0, toYpixel=255, maxX=maxX, maxY=maxY) #for each TOF, get the average counts over the two #background regions (top and bottom) _mt = mtd[ws_histo_data+'_1D'] _x_axis = _mt.readX(0)[:] _nbr_tof = len(_x_axis) _tof_range = range(_nbr_tof-1) _back_array = zeros(_nbr_tof-1) _back_array_error = zeros(_nbr_tof-1) #work on left side _LfromPx = data_back[0] _LtoPx = data_peak[0] #work on right side _RfromPx = data_peak[1] _RtoPx = data_back[1] bLeftBack = False if 
(_LfromPx < _LtoPx): _Larray = arange(_LtoPx - _LfromPx) + _LfromPx bLeftBack = True bRightBack = False if (_RfromPx < _RtoPx): _Rarray = arange(_RtoPx - _RfromPx) + _RfromPx bRightBack = True if (bLeftBack and bRightBack): _y_px_range = numpy.append(_Larray,_Rarray) # _y_px_range = _y_px_range.flatten() else: if (bLeftBack): _y_px_range = _Larray else: _y_px_range = _Rarray for i in _tof_range: _sum = 0. _sum_error = 0. _pts_summed = 0. _val = 0. _err = 0. for j in _y_px_range: _val = float(_mt.readY(int(j))[int(i)]) _err = float(_mt.readE(int(j))[int(i)]) if (_val != 0 and _err !=0): _new_val = float(_val / _err) _new_err = 1./_err _sum += _new_val _sum_error += _new_err if (_val !=0. and _err !=0.): _back_array[i] = float(_sum / _sum_error) _back_array_error[i] = float(1./ _sum_error) #substract this number from the rest CreateWorkspace(OutputWorkspace='background', DataX=_x_axis, DataY=_back_array, DataE=_back_array_error, UnitX="TOF", ParentWorkspace=mtd[ws_histo_data], NSpec=1) #recreate workspace at the end mt1 = mtd[ws_histo_data+'_1D'] mt2 = mtd['background'] Minus(LHSWorkspace=ws_histo_data+'_1D', RHSWorkspace='background', OutputWorkspace=ws_data) ResetNegatives(InputWorkspace=ws_data, OutputWorkspace=ws_data, AddMinimum=0) if mtd.workspaceExists(ws_histo_data+'_1D'): mtd.deleteWorkspace(ws_histo_data+'_1D') # SumSpectra(InputWorkspace=ws_data, # OutputWorkspace='wks_after_back_subtraction_1d') if (not(subtract_data_bck)): wks_utility.createIntegratedWorkspace(mtd[ws_histo_data], ws_data, fromXpixel=data_low_res[0], toXpixel=data_low_res[1], fromYpixel=data_peak[0], toYpixel=data_peak[1], maxX=maxX, maxY=maxY) ConvertToMatrixWorkspace(InputWorkspace=ws_data, OutputWorkspace=ws_data) # ConvertToMatrixWorkspace(InputWorkspace=ws_data, # OutputWorkspace=ws_data) if mtd.workspaceExists(ws_histo_data): mtd.deleteWorkspace(ws_histo_data) mtd.deleteWorkspace(ws_histo_data) if (NormFlag): print '-> normalization file is ' + str(normalization_run) # Find full 
path to event NeXus data file try: norm_file = FileFinder.findRuns("REF_L%d" %normalization_run)[0] except RuntimeError: msg = "RefLReduction: could not find run %d\n" %normalization_run msg += "Add your data folder to your User Data Directories in the File menu" raise RuntimeError(msg) #load normalization file ws_name = "_normalization_refl%d" % normalization_run ws_norm_event_data = ws_name+"_evt" ws_norm_histo_data = ws_name+"_histo" if not mtd.workspaceExists(ws_norm_event_data): LoadEventNexus(Filename=norm_file, OutputWorkspace=ws_norm_event_data) # Rebin data print '-> rebin normalization' Rebin(InputWorkspace=ws_norm_event_data, OutputWorkspace=ws_norm_histo_data, Params=[TOFrange[0], TOFsteps, TOFrange[1]]) # Keep only range of TOF of interest print '-> Crop TOF range' CropWorkspace(InputWorkspace=ws_norm_histo_data, OutputWorkspace=ws_norm_histo_data, XMin=TOFrange[0], XMax=TOFrange[1]) # Normalized by Current (proton charge) print '-> normalized by current direct beam' NormaliseByCurrent(InputWorkspace=ws_norm_histo_data, OutputWorkspace=ws_norm_histo_data) ws_data_bck = '_' + ws_name + '_NormBckWks' # ws_norm_rebinned = '_' + ws_name + '_NormRebinnedWks' ws_norm_rebinned = ws_name + '_NormRebinnedWks' if (subtract_norm_bck and (backSubMethod == 1)): print '-> substract background to direct beam' ConvertToMatrixWorkspace(InputWorkspace=ws_norm_histo_data, OutputWorkspace=ws_norm_histo_data) ws_norm_bck = '_' + ws_name + '_NormBckWks' bBackLeft = False if (norm_back[0] < (norm_peak[0]-1)): bBackLeft = True ws_norm_bck_1 = ws_norm_bck + "_1" RefRoi(InputWorkspace=ws_norm_histo_data, OutputWorkspace=ws_norm_bck_1, NXPixel=maxX, NYPixel=maxY, ConvertToQ=False, IntegrateY=False, SumPixels=True, XPixelMin=norm_low_res[0], XPixelMax=norm_low_res[1], YPixelMin=norm_back[0], YPixelMax=norm_peak[0]-1, NormalizeSum=True) ws_norm_bck_1_rebin = ws_norm_bck_1 + '_rebin' RebinToWorkspace(WorkspaceToRebin=ws_norm_bck_1, WorkspaceToMatch=ws_norm_histo_data, 
OutputWorkspace=ws_norm_bck_1_rebin) bBackRight = False if ((norm_peak[1]+1) < norm_back[1]): bBackRight = True ws_norm_bck_2 = ws_norm_bck + "_2" RefRoi(InputWorkspace=ws_norm_histo_data, OutputWorkspace=ws_norm_bck_2, NXPixel=maxX, NYPixel=maxY, ConvertToQ=False, IntegrateY=False, SumPixels=True, XPixelMin=norm_low_res[0], XPixelMax=norm_low_res[1], YPixelMin=norm_peak[1]+1, YPixelMax=norm_back[1], NormalizeSum=True) ws_norm_bck_2_rebin = ws_norm_bck_2 + '_rebin' RebinToWorkspace(WorkspaceToRebin=ws_norm_bck_2, WorkspaceToMatch=ws_norm_histo_data, OutputWorkspace=ws_norm_bck_2_rebin) if (bBackLeft and bBackRight): Plus(RHSWorkspace=ws_norm_bck_1_rebin, LHSWorkspace=ws_norm_bck_2_rebin, OutputWorkspace=ws_norm_bck) Scale(InputWorkspace=ws_norm_bck, OutputWorkspace=ws_norm_bck+'_scale', Factor=0.5, Operation="Multiply") Minus(LHSWorkspace=ws_norm_histo_data, RHSWorkspace=ws_norm_bck+'_scale', OutputWorkspace=ws_norm_rebinned) if mtd.workspaceExists(ws_norm_bck_1_rebin): mtd.deleteWorkspace(ws_norm_bck_1_rebin) if mtd.workspaceExists(ws_norm_bck_2_rebin): mtd.deleteWorkspace(ws_norm_bck_2_rebin) if mtd.workspaceExists(ws_norm_bck_1): mtd.deleteWorkspace(ws_norm_bck_1) if mtd.workspaceExists(ws_norm_bck_2): mtd.deleteWorkspace(ws_norm_bck_2) if mtd.workspaceExists(ws_norm_histo_data): mtd.deleteWorkspace(ws_norm_histo_data) if mtd.workspaceExists(ws_norm_bck+'_scale'): mtd.deleteWorkspace(ws_norm_bck+'_scale') elif (bBackLeft): Minus(LHSWorkspace=ws_norm_histo_data, RHSWorkspace=ws_norm_bck_1_rebin, OutputWorkspace=ws_norm_rebinned) if mtd.workspaceExists(ws_norm_bck_1_rebin): mtd.deleteWorkspace(ws_norm_bck_1_rebin) if mtd.workspaceExists(ws_norm_bck_1): mtd.deleteWorkspace(ws_norm_bck_1) if mtd.workspaceExists(ws_norm_histo_data): mtd.deleteWorkspace(ws_norm_histo_data) elif (bBackRight): Minus(LHSWorkspace=ws_norm_histo_data, RHSWorkspace=ws_norm_bck_2_rebin, OutputWorkspace=ws_norm_rebinned) if mtd.workspaceExists(ws_norm_bck_2_rebin): 
mtd.deleteWorkspace(ws_norm_bck_2_rebin) if mtd.workspaceExists(ws_norm_bck_2): mtd.deleteWorkspace(ws_norm_bck_2) if mtd.workspaceExists(ws_norm_histo_data): mtd.deleteWorkspace(ws_norm_histo_data) #Here I need to set to zeros all the negative entries ResetNegatives(InputWorkspace=ws_norm_rebinned, OutputWorkspace=ws_norm_rebinned, AddMinimum=0) wks_utility.createIntegratedWorkspace(mtd[ws_norm_rebinned], ws_norm_rebinned, fromXpixel=norm_low_res[0], toXpixel=norm_low_res[1], fromYpixel=norm_peak[0], toYpixel=norm_peak[1], maxX=maxX, maxY=maxY, bCleaning=True) if (subtract_norm_bck and (backSubMethod == 2)): #integrate over the x axis in the low axis range specified wks_utility.createIntegratedWorkspace(mtd[ws_norm_histo_data], ws_norm_histo_data+'_1D', fromXpixel=norm_low_res[0], toXpixel=norm_low_res[1], fromYpixel=0, toYpixel=255, maxX=maxX, maxY=maxY) #for each TOF, get the average counts over the two #background regions (top and bottom) _mt = mtd[ws_norm_histo_data+'_1D'] _x_axis = _mt.readX(0)[:] _nbr_tof = len(_x_axis) _tof_range = range(_nbr_tof-1) _back_array = zeros(_nbr_tof-1) _back_array_error = zeros(_nbr_tof-1) #work on left side _LfromPx = norm_back[0] _LtoPx = norm_peak[0] #work on right side _RfromPx = norm_peak[1] _RtoPx = norm_back[1] bLeftBack = False if (_LfromPx < _LtoPx): _Larray = arange(_LtoPx - _LfromPx) + _LfromPx bLeftBack = True bRightBack = False if (_RfromPx < _RtoPx): _Rarray = arange(_RtoPx - _RfromPx) + _RfromPx bRightBack = True if (bLeftBack and bRightBack): _y_px_range = numpy.append(_Larray,_Rarray) else: if (bLeftBack): _y_px_range = _Larray else: _y_px_range = _Rarray for i in _tof_range: _sum = 0. _sum_error = 0. _pts_summed = 0. _val = 0. _err = 0. for j in _y_px_range: _val = float(_mt.readY(int(j))[int(i)]) _err = float(_mt.readE(int(j))[int(i)]) if (_val != 0 and _err !=0): _new_val = float(_val / _err) _new_err = 1./_err _sum += _new_val _sum_error += _new_err if (_val !=0. 
and _err !=0.): _back_array[i] = float(_sum / _sum_error) _back_array_error[i] = float(1./ _sum_error) #substract this number from the rest CreateWorkspace(OutputWorkspace='background', DataX=_x_axis, DataY=_back_array, DataE=_back_array_error, UnitX="TOF", ParentWorkspace=mtd[ws_norm_histo_data], NSpec=1) # #recreate workspace at the end # mt1 = mtd[ws_norm_histo_data+'_1D'] # mt2 = mtd['background'] Minus(LHSWorkspace=ws_norm_histo_data+'_1D', RHSWorkspace='background', OutputWorkspace=ws_norm_rebinned) if mtd.workspaceExists(ws_norm_histo_data+'_1D'): mtd.deleteWorkspace(ws_norm_histo_data+'_1D') if mtd.workspaceExists('background'): mtd.deleteWorkspace('background') ResetNegatives(InputWorkspace=ws_norm_rebinned, OutputWorkspace=ws_norm_rebinned, AddMinimum=0) else: #Create a new event workspace of only the range of pixel of interest #background range (along the y-axis) and of only the pixel #of interest along the x-axis (to avoid the frame effect) ws_integrated_data = '_' + ws_name + '_IntegratedNormWks' wks_utility.createIntegratedWorkspace(mtd[ws_norm_histo_data], ws_integrated_data, fromXpixel=norm_low_res[0], toXpixel=norm_low_res[1], fromYpixel=norm_peak[0], toYpixel=norm_peak[1], maxX=maxX, maxY=maxY) RebinToWorkspace(WorkspaceToRebin=ws_integrated_data, WorkspaceToMatch=ws_data, OutputWorkspace=ws_norm_rebinned) if mtd.workspaceExists(ws_integrated_data): mtd.deleteWorkspace(ws_integrated_data) #Normalization print '-> Sum spectra' SumSpectra(InputWorkspace=ws_norm_rebinned, OutputWorkspace=ws_norm_rebinned) #### divide data by normalize histo workspace print '-> Divide data by direct beam' Divide(LHSWorkspace=ws_data, RHSWorkspace=ws_norm_rebinned, OutputWorkspace=ws_data) #now we can convert to Q theta = math.fabs(tthd_rad - thi_rad)/2. AngleOffset_deg = float(self.getProperty("AngleOffset")) AngleOffset_rad = (AngleOffset_deg * math.pi) / 180. 
theta += AngleOffset_rad #this is where we need to apply the scaling factor sfFile = self.getProperty("ScalingFactorFile") incidentMedium = self.getProperty("IncidentMediumSelected") if os.path.isfile(sfFile): print '-> Apply automatic SF!' print '--> using SF config file: ' + sfFile ws_data_scaled = wks_utility.applySF(ws_data, incidentMedium, sfFile, slitsValuePrecision, slitsWidthFlag) else: print '-> Automatic SF not applied!' print '--> unknown or no SF config file defined !' ws_data_scaled = ws_data if dMD is not None and theta is not None: if bDebug: print 'DEBUG: theta= {0:4f}'.format(theta) _tof_axis = mtd[ws_data].readX(0) _const = float(4) * math.pi * m * dMD / h sz_tof = numpy.shape(_tof_axis)[0] _q_axis = zeros(sz_tof-1) for t in range(sz_tof-1): tof1 = _tof_axis[t] tof2 = _tof_axis[t+1] tofm = (tof1+tof2)/2. _Q = _const * math.sin(theta) / (tofm*1e-6) _q_axis[t] = _Q*1e-10 q_max = max(_q_axis) if (q_min >= q_max): q_min = min(_q_axis) if bDebug: print 'DEBUG: [q_min:q_bin:q_max]=[{0:4f},{1:4f},{2:4f}]'.format(q_min, q_step, q_max) if (backSubMethod == 1): ws_integrated_data = ws_name + '_IntegratedDataWks' print '-> keep only range of pixel of interest' wks_utility.createIntegratedWorkspace(mtd[ws_data_scaled], ws_integrated_data, fromXpixel=data_low_res[0], toXpixel=data_low_res[1], fromYpixel=data_peak[0], toYpixel=data_peak[1], maxX=maxX, maxY=maxY) ws_data_cleaned = ws_name + '_cleaned' # wks_utility.cleanup_data(InputWorkspace=ws_integrated_data, # OutputWorkspace=ws_data_cleaned, # maxY=maxY) # mtd.deleteWorkspace(ws_data_scaled) # mtd.deleteWorkspace(ws_data) ws_data_Q = ws_data + '_Q' print '-> convert to Q' # wks_utility.convertWorkspaceToQ(ws_data_scaled, wks_utility.convertWorkspaceToQ(ws_integrated_data, # wks_utility.convertWorkspaceToQ(ws_data_cleaned, ws_data_Q, fromYpixel=data_peak[0], toYpixel=data_peak[1], cpix=data_cpix, source_to_detector=dMD, sample_to_detector=dSD, theta=theta, geo_correction=False, 
q_binning=[q_min,q_step,q_max]) if mtd.workspaceExists(ws_integrated_data): mtd.deleteWorkspace(ws_integrated_data) else: ws_data_Q = ws_data + '_Q' print '-> convert to Q' wks_utility.convertWorkspaceToQ(ws_data_scaled, ws_data_Q, fromYpixel=data_peak[0], toYpixel=data_peak[1], cpix=data_cpix, source_to_detector=dMD, sample_to_detector=dSD, theta=theta, geo_correction=True, q_binning=[q_min,q_step,q_max]) if mtd.workspaceExists(ws_data_scaled): mtd.deleteWorkspace(ws_data_scaled) print '-> replace special values' mt = mtd[ws_data_Q] ReplaceSpecialValues(InputWorkspace=ws_data_Q, NaNValue=0, NaNError=0, InfinityValue=0, InfinityError=0, OutputWorkspace=ws_data_Q) output_ws = self.getPropertyValue("OutputWorkspace") #add a unique time stamp to the data to sort them for the #stitching process import time _time = int(time.time()) output_ws = output_ws + '_#' + str(_time) + 'ts' if mtd.workspaceExists(output_ws): mtd.deleteWorkspace(output_ws) print '-> sum spectra' SumSpectra(InputWorkspace=ws_data_Q, OutputWorkspace=output_ws) #keep only none zero values try: print '-> keep only non-zeros values' mt = mtd[output_ws] sz = shape(mt.readY(0)[:])[0] data_x = [] data_y = [] data_y_error = [] for i in range(sz): _y = mt.readY(0)[i] #print '_y={0:3f} at i={1:2d}'.format(_y, i) if _y != 0.: data_x.append(mt.readX(0)[i]) data_y.append(_y) data_y_error.append(mt.readE(0)[i]) #if at least one non zero value found if data_x != []: print '-> cleanup data (remove zeros)' CreateWorkspace(OutputWorkspace=output_ws, DataX=data_x, DataY=data_y, DataE=data_y_error, Nspec=1, UnitX="MomentumTransfer") except: pass #removing first and last Q points (edge effect) mt=mtd[output_ws] x_axis = mt.readX(0)[:] if (len(x_axis) > 2): print '-> remove first and last point (edge effet)' qmin = x_axis[1] qmax = x_axis[-2] CropWorkspace(InputWorkspace=output_ws, OutputWorkspace=output_ws, XMin=qmin, XMax=qmax) #space self.setProperty("OutputWorkspace", mtd[output_ws]) #cleanup all workspace used print 
'-> Cleaning useless workspaces' if mtd.workspaceExists(ws_event_data): mtd.deleteWorkspace(ws_event_data) if mtd.workspaceExists(ws_data_Q): mtd.deleteWorkspace(ws_data_Q) if mtd.workspaceExists(ws_data): mtd.deleteWorkspace(ws_data) if (NormFlag): if mtd.workspaceExists(ws_norm_event_data): mtd.deleteWorkspace(ws_norm_event_data) print
# NOTE(review): this method body arrived collapsed — the physical lines below
# are fragments of one large method. Code kept byte-identical; only this
# header comment block was added.
#
# Rewritten REF_L reduction PyExec: delegates the individual steps to
# wks_utility (loadNeXus, getCentralPixel, getDistances, getTheta,
# getProtonCharge, rebinNeXus, getQrange, getSlitsSize, cropTOF,
# normalizeNeXus, integrateOverLowResRange, substractBackground,
# fullSumWithError, divideDataByNormalization, applyScalingFactor,
# convertToQ / convertToQWithoutCorrection, createQworkspace,
# cropAxisToOnlyNonzeroElements, integrateOverPeakRange, cleanupData1D,
# createFinalWorkspace) and publishes the timestamped result as
# OutputWorkspace.
#
# NOTE(review): issues spotted while reading (left untouched here):
#   * the `else` branches `dataLowResRange = [0,maxX-1]` and
#     `normLowResRange = [0,maxX-1]` read `maxX`, which is only assigned
#     later in the method — NameError if either low-res flag is False;
#     confirm the intended statement ordering.
#   * `normFlag` is read from "NormFlag" but the normalization section runs
#     unconditionally — verify whether the flag should gate it.
#   * commented-out debug dumps reference a developer-local path
#     (/mnt/hgfs/j35/...) and are candidates for deletion.
def PyExec(self): print '-- > starting new Reflectometer Reduction ...' import os import numpy import math from reduction.instruments.reflectometer import wks_utility from mantid import mtd #remove all previous workspaces list_mt = mtd.getObjectNames() for _mt in list_mt: if _mt.find('_scaled') != -1: DeleteWorkspace(_mt) if _mt.find('_reflectivity') != -1: DeleteWorkspace(_mt) # retrieve settings from GUI print '-> Retrieving settings from GUI' # DATA dataRunNumbers = self.getProperty("RunNumbers").value dataPeakRange = self.getProperty("SignalPeakPixelRange").value dataBackRange = self.getProperty("SignalBackgroundPixelRange").value dataBackFlag = self.getProperty("SubtractSignalBackground").value #Due to the frame effect, it's sometimes necessary to narrow the range #over which we add all the pixels along the low resolution #Parameter dataLowResFlag = self.getProperty("LowResDataAxisPixelRangeFlag") if dataLowResFlag: dataLowResRange = self.getProperty("LowResDataAxisPixelRange").value else: dataLowResRange = [0,maxX-1] # NORM normalizationRunNumber = self.getProperty("NormalizationRunNumber").value normFlag = self.getProperty("NormFlag") normBackRange = self.getProperty("NormBackgroundPixelRange").value normPeakRange = self.getProperty("NormPeakPixelRange").value normBackFlag = self.getProperty("SubtractNormBackground").value #Due to the frame effect, it's sometimes necessary to narrow the range #over which we add all the pixels along the low resolution #Parameter normLowResFlag = self.getProperty("LowResNormAxisPixelRangeFlag") if normLowResFlag: normLowResRange = self.getProperty("LowResNormAxisPixelRange").value else: normLowResRange = [0,maxX-1] #GENERAL TOFrangeFlag = self.getProperty("TofRangeFlag") if (TOFrangeFlag): TOFrange = self.getProperty("TOFRange").value #microS else: TOFrange = [0, 200000] # TOF binning parameters binTOFrange = [0, 200000] binTOFsteps = 50 # geometry correction geometryCorrectionFlag = 
self.getProperty("GeometryCorrectionFlag").value qMin = self.getProperty("QMin").value qStep = self.getProperty("QStep").value if (qStep > 0): #force logarithmic binning qStep = -qStep; # angle offset angleOffsetDeg = self.getProperty("AngleOffset").value #dimension of the detector (256 by 304 pixels) maxX = 304 maxY = 256 h = 6.626e-34 #m^2 kg s^-1 m = 1.675e-27 #kg # sfCalculator settings slitsValuePrecision = sfCalculator.PRECISION sfFile = self.getProperty("ScalingFactorFile").value incidentMedium = self.getProperty("IncidentMediumSelected").value slitsWidthFlag = self.getProperty("SlitsWidthFlag").value # ==== done retrievin the settings ===== # ==== start reduction ==== # work with data # load data ws_event_data = wks_utility.loadNeXus(dataRunNumbers, 'data') ## retrieve general informations # calculate the central pixel (using weighted average) print '-> retrieving general informations' data_central_pixel = wks_utility.getCentralPixel(ws_event_data, dataPeakRange) # get the distance moderator-detector and sample-detector [dMD, dSD] = wks_utility.getDistances(ws_event_data) # get theta theta = wks_utility.getTheta(ws_event_data, angleOffsetDeg) # get proton charge pc = wks_utility.getProtonCharge(ws_event_data) error_0 = 1. 
/ pc # rebin data ws_histo_data = wks_utility.rebinNeXus(ws_event_data, [binTOFrange[0], binTOFsteps, binTOFrange[1]], 'data') # get q range q_range = wks_utility.getQrange(ws_histo_data, theta, dMD, qMin, qStep) # slit size [first_slit_size, last_slit_size] = wks_utility.getSlitsSize(ws_histo_data) # keep only TOF range ws_histo_data = wks_utility.cropTOF(ws_histo_data, TOFrange[0], TOFrange[1], 'data') # normalize by current proton charge ws_histo_data = wks_utility.normalizeNeXus(ws_histo_data, 'data') # integrate over low resolution range [data_tof_axis, data_y_axis, data_y_error_axis] = wks_utility.integrateOverLowResRange(ws_histo_data, dataLowResRange, 'data') # #DEBUG ONLY # wks_utility.ouput_big_ascii_file('/mnt/hgfs/j35/Matlab/DebugMantid/Strange0ValuesToData/data_file_after_low_resolution_integration.txt', # data_tof_axis, # data_y_axis, # data_y_error_axis) tof_axis = data_tof_axis[0:-1].copy() tof_axis_full = data_tof_axis.copy() # data_tof_axis.shape -> (62,) # data_y_axis.shape -> (256,61) #substract background [data_y_axis, data_y_error_axis] = wks_utility.substractBackground(tof_axis , data_y_axis, data_y_error_axis, dataPeakRange, dataBackFlag, dataBackRange, error_0, 'data') # #DEBUG ONLY # wks_utility.ouput_big_ascii_file('/mnt/hgfs/j35/Matlab/DebugMantid/Strange0ValuesToData/data_file_back_sub_not_integrated.txt', # data_tof_axis, # data_y_axis, # data_y_error_axis) # work with normalization # load normalization ws_event_norm = wks_utility.loadNeXus(int(normalizationRunNumber), 'normalization') # get proton charge pc = wks_utility.getProtonCharge(ws_event_norm) error_0 = 1. 
/ pc # rebin normalization ws_histo_norm = wks_utility.rebinNeXus(ws_event_norm, [binTOFrange[0], binTOFsteps, binTOFrange[1]], 'normalization') # keep only TOF range ws_histo_norm = wks_utility.cropTOF(ws_histo_norm, TOFrange[0], TOFrange[1], 'normalization') # normalize by current proton charge ws_histo_norm = wks_utility.normalizeNeXus(ws_histo_norm, 'normalization') # integrate over low resolution range [norm_tof_axis, norm_y_axis, norm_y_error_axis] = wks_utility.integrateOverLowResRange(ws_histo_norm, normLowResRange, 'normalization') # substract background [norm_y_axis, norm_y_error_axis] = wks_utility.substractBackground(norm_tof_axis[0:-1], norm_y_axis, norm_y_error_axis, normPeakRange, normBackFlag, normBackRange, error_0, 'normalization') [av_norm, av_norm_error] = wks_utility.fullSumWithError(norm_y_axis, norm_y_error_axis) # ## DEBUGGING ONLY # wks_utility.ouput_ascii_file('/mnt/hgfs/j35/Matlab/DebugMantid/Strange0ValuesToData/norm_file_back_sub_not_integrated.txt', # norm_tof_axis, # av_norm, # av_norm_error) [final_data_y_axis, final_data_y_error_axis] = wks_utility.divideDataByNormalization(data_y_axis, data_y_error_axis, av_norm, av_norm_error) # #DEBUG ONLY # wks_utility.ouput_big_ascii_file('/mnt/hgfs/j35/Matlab/DebugMantid/Strange0ValuesToData/data_divided_by_norm_not_integrated.txt', # data_tof_axis, # final_data_y_axis, # final_data_y_error_axis) # apply Scaling factor [tof_axis_full, y_axis, y_error_axis] = wks_utility.applyScalingFactor(tof_axis_full, final_data_y_axis, final_data_y_error_axis, incidentMedium, sfFile, slitsValuePrecision, slitsWidthFlag) # #DEBUG ONLY # wks_utility.ouput_big_ascii_file('/mnt/hgfs/j35/Matlab/DebugMantid/Strange0ValuesToData/after_applying_scaling_factor.txt', # data_tof_axis, # y_axis, # y_error_axis) if geometryCorrectionFlag: # convert To Q with correction [q_axis, y_axis, y_error_axis] = wks_utility.convertToQ(tof_axis_full, y_axis, y_error_axis, peak_range = dataPeakRange, central_pixel = 
data_central_pixel, source_to_detector_distance = dMD, sample_to_detector_distance = dSD, theta = theta, first_slit_size = first_slit_size, last_slit_size = last_slit_size) else: # convert to Q without correction [q_axis, y_axis, y_error_axis] = wks_utility.convertToQWithoutCorrection(tof_axis_full, y_axis, y_error_axis, peak_range = dataPeakRange, central_pixel = data_central_pixel, source_to_detector_distance = dMD, sample_to_detector_distance = dSD, theta = theta, first_slit_size = first_slit_size, last_slit_size = last_slit_size) # wks_utility.ouput_big_Q_ascii_file('/mnt/hgfs/j35/Matlab/DebugMantid/Strange0ValuesToData/after_conversion_to_q.txt', # q_axis, # y_axis, # y_error_axis) sz = q_axis.shape nbr_pixel = sz[0] # create workspace q_workspace = wks_utility.createQworkspace(q_axis, y_axis, y_error_axis) q_rebin = Rebin(InputWorkspace=q_workspace, Params=q_range, PreserveEvents=True) # keep only the q values that have non zero counts nonzero_q_rebin_wks = wks_utility.cropAxisToOnlyNonzeroElements(q_rebin, dataPeakRange) new_q_axis = nonzero_q_rebin_wks.readX(0)[:] # integrate spectra (normal mean) and remove first and last Q value [final_x_axis, final_y_axis, final_error_axis] = wks_utility.integrateOverPeakRange(nonzero_q_rebin_wks, dataPeakRange) # cleanup data [final_y_axis, final_y_error_axis] = wks_utility.cleanupData1D(final_y_axis, final_error_axis) # create final workspace import time _time = int(time.time()) name_output_ws = self.getPropertyValue("OutputWorkspace") name_output_ws = name_output_ws + '_#' + str(_time) + 'ts' final_workspace = wks_utility.createFinalWorkspace(final_x_axis, final_y_axis, final_y_error_axis, name_output_ws) self.setProperty('OutputWorkspace', mtd[name_output_ws])
def PyExec(self): print '-- > starting new Reflectometer Reduction ...' import os import numpy import math from reduction.instruments.reflectometer import wks_utility from mantid import mtd #remove all previous workspaces list_mt = mtd.getObjectNames() for _mt in list_mt: if _mt.find('_scaled') != -1: DeleteWorkspace(_mt) if _mt.find('_reflectivity') != -1: DeleteWorkspace(_mt) # retrieve settings from GUI print '-> Retrieving settings from GUI' # DATA dataRunNumbers = self.getProperty("RunNumbers").value dataPeakRange = self.getProperty("SignalPeakPixelRange").value dataBackRange = self.getProperty("SignalBackgroundPixelRange").value dataBackFlag = self.getProperty("SubtractSignalBackground").value #Due to the frame effect, it's sometimes necessary to narrow the range #over which we add all the pixels along the low resolution #Parameter dataLowResFlag = self.getProperty("LowResDataAxisPixelRangeFlag") if dataLowResFlag: dataLowResRange = self.getProperty( "LowResDataAxisPixelRange").value else: dataLowResRange = [0, maxX - 1] # NORM normalizationRunNumber = self.getProperty( "NormalizationRunNumber").value normFlag = self.getProperty("NormFlag") normBackRange = self.getProperty("NormBackgroundPixelRange").value normPeakRange = self.getProperty("NormPeakPixelRange").value normBackFlag = self.getProperty("SubtractNormBackground").value #Due to the frame effect, it's sometimes necessary to narrow the range #over which we add all the pixels along the low resolution #Parameter normLowResFlag = self.getProperty("LowResNormAxisPixelRangeFlag") if normLowResFlag: normLowResRange = self.getProperty( "LowResNormAxisPixelRange").value else: normLowResRange = [0, maxX - 1] #GENERAL TOFrangeFlag = self.getProperty("TofRangeFlag") if (TOFrangeFlag): TOFrange = self.getProperty("TOFRange").value #microS else: TOFrange = [0, 200000] # TOF binning parameters binTOFrange = [0, 200000] binTOFsteps = 40 # geometry correction geometryCorrectionFlag = self.getProperty( 
"GeometryCorrectionFlag").value qMin = self.getProperty("QMin").value qStep = self.getProperty("QStep").value if (qStep > 0): #force logarithmic binning qStep = -qStep # angle offset angleOffsetDeg = self.getProperty("AngleOffset").value h = 6.626e-34 #m^2 kg s^-1 m = 1.675e-27 #kg # sfCalculator settings slitsValuePrecision = sfCalculator.PRECISION sfFile = self.getProperty("ScalingFactorFile").value incidentMedium = self.getProperty("IncidentMediumSelected").value slitsWidthFlag = self.getProperty("SlitsWidthFlag").value # ==== done retrievin the settings ===== # ==== start reduction ==== # work with data # load data ws_event_data = wks_utility.loadNeXus(dataRunNumbers, 'data') is_nexus_detector_rotated_flag = wks_utility.isNexusTakeAfterRefDate( ws_event_data.getRun().getProperty('run_start').value) print '-> is NeXus taken with new detector geometry: ' + str( is_nexus_detector_rotated_flag) #dimension of the detector (256 by 304 pixels) if is_nexus_detector_rotated_flag: maxX = 256 maxY = 304 else: maxX = 304 maxY = 256 ## retrieve general informations # calculate the central pixel (using weighted average) print '-> retrieving general informations' data_central_pixel = wks_utility.getCentralPixel( ws_event_data, dataPeakRange, is_nexus_detector_rotated_flag) # get the distance moderator-detector and sample-detector [dMD, dSD] = wks_utility.getDistances(ws_event_data) # get theta theta = wks_utility.getTheta(ws_event_data, angleOffsetDeg) # get proton charge pc = wks_utility.getProtonCharge(ws_event_data) error_0 = 1. 
/ pc # rebin data ws_histo_data = wks_utility.rebinNeXus( ws_event_data, [binTOFrange[0], binTOFsteps, binTOFrange[1]], 'data') # get q range q_range = wks_utility.getQrange(ws_histo_data, theta, dMD, qMin, qStep) # slit size [first_slit_size, last_slit_size] = wks_utility.getSlitsSize(ws_histo_data) # keep only TOF range ws_histo_data = wks_utility.cropTOF(ws_histo_data, TOFrange[0], TOFrange[1], 'data') # normalize by current proton charge ws_histo_data = wks_utility.normalizeNeXus(ws_histo_data, 'data') # integrate over low resolution range [data_tof_axis, data_y_axis, data_y_error_axis] = wks_utility.integrateOverLowResRange( ws_histo_data, dataLowResRange, 'data', is_nexus_detector_rotated_flag) # #DEBUG ONLY # wks_utility.ouput_big_ascii_file('/mnt/hgfs/j35/Matlab/DebugMantid/Strange0ValuesToData/data_file_after_low_resolution_integration.txt', # data_tof_axis, # data_y_axis, # data_y_error_axis) tof_axis = data_tof_axis[0:-1].copy() tof_axis_full = data_tof_axis.copy() # data_tof_axis.shape -> (62,) # data_y_axis.shape -> (256,61) #substract background [data_y_axis, data_y_error_axis] = wks_utility.substractBackground( tof_axis, data_y_axis, data_y_error_axis, dataPeakRange, dataBackFlag, dataBackRange, error_0, 'data') # #DEBUG ONLY # wks_utility.ouput_big_ascii_file('/mnt/hgfs/j35/Matlab/DebugMantid/Strange0ValuesToData/data_file_back_sub_not_integrated.txt', # data_tof_axis, # data_y_axis, # data_y_error_axis) # work with normalization # load normalization ws_event_norm = wks_utility.loadNeXus(int(normalizationRunNumber), 'normalization') # get proton charge pc = wks_utility.getProtonCharge(ws_event_norm) error_0 = 1. 
/ pc # rebin normalization ws_histo_norm = wks_utility.rebinNeXus( ws_event_norm, [binTOFrange[0], binTOFsteps, binTOFrange[1]], 'normalization') # keep only TOF range ws_histo_norm = wks_utility.cropTOF(ws_histo_norm, TOFrange[0], TOFrange[1], 'normalization') # normalize by current proton charge ws_histo_norm = wks_utility.normalizeNeXus(ws_histo_norm, 'normalization') # integrate over low resolution range [norm_tof_axis, norm_y_axis, norm_y_error_axis] = wks_utility.integrateOverLowResRange( ws_histo_norm, normLowResRange, 'normalization', is_nexus_detector_rotated_flag) # substract background [norm_y_axis, norm_y_error_axis] = wks_utility.substractBackground( norm_tof_axis[0:-1], norm_y_axis, norm_y_error_axis, normPeakRange, normBackFlag, normBackRange, error_0, 'normalization') [av_norm, av_norm_error] = wks_utility.fullSumWithError(norm_y_axis, norm_y_error_axis) # ## DEBUGGING ONLY # wks_utility.ouput_ascii_file('/mnt/hgfs/j35/Matlab/DebugMantid/Strange0ValuesToData/norm_file_back_sub_not_integrated.txt', # norm_tof_axis, # av_norm, # av_norm_error) [final_data_y_axis, final_data_y_error_axis ] = wks_utility.divideDataByNormalization(data_y_axis, data_y_error_axis, av_norm, av_norm_error) # #DEBUG ONLY # wks_utility.ouput_big_ascii_file('/mnt/hgfs/j35/Matlab/DebugMantid/Strange0ValuesToData/data_divided_by_norm_not_integrated.txt', # data_tof_axis, # final_data_y_axis, # final_data_y_error_axis) # apply Scaling factor [tof_axis_full, y_axis, y_error_axis] = wks_utility.applyScalingFactor( tof_axis_full, final_data_y_axis, final_data_y_error_axis, incidentMedium, sfFile, slitsValuePrecision, slitsWidthFlag) # #DEBUG ONLY # wks_utility.ouput_big_ascii_file('/mnt/hgfs/j35/Matlab/DebugMantid/Strange0ValuesToData/after_applying_scaling_factor.txt', # data_tof_axis, # y_axis, # y_error_axis) if geometryCorrectionFlag: # convert To Q with correction [q_axis, y_axis, y_error_axis ] = wks_utility.convertToQ(tof_axis_full, y_axis, y_error_axis, 
peak_range=dataPeakRange, central_pixel=data_central_pixel, source_to_detector_distance=dMD, sample_to_detector_distance=dSD, theta=theta, first_slit_size=first_slit_size, last_slit_size=last_slit_size) else: # convert to Q without correction [q_axis, y_axis, y_error_axis] = wks_utility.convertToQWithoutCorrection( tof_axis_full, y_axis, y_error_axis, peak_range=dataPeakRange, source_to_detector_distance=dMD, sample_to_detector_distance=dSD, theta=theta, first_slit_size=first_slit_size, last_slit_size=last_slit_size) # wks_utility.ouput_big_Q_ascii_file('/mnt/hgfs/j35/Matlab/DebugMantid/Strange0ValuesToData/after_conversion_to_q.txt', # q_axis, # y_axis, # y_error_axis) sz = q_axis.shape nbr_pixel = sz[0] # create workspace q_workspace = wks_utility.createQworkspace(q_axis, y_axis, y_error_axis) q_rebin = Rebin(InputWorkspace=q_workspace, Params=q_range, PreserveEvents=True) # keep only the q values that have non zero counts nonzero_q_rebin_wks = wks_utility.cropAxisToOnlyNonzeroElements( q_rebin, dataPeakRange) new_q_axis = nonzero_q_rebin_wks.readX(0)[:] # integrate spectra (normal mean) and remove first and last Q value [final_x_axis, final_y_axis, final_error_axis ] = wks_utility.integrateOverPeakRange(nonzero_q_rebin_wks, dataPeakRange) # cleanup data [final_y_axis, final_y_error_axis ] = wks_utility.cleanupData1D(final_y_axis, final_error_axis) # create final workspace import time _time = int(time.time()) name_output_ws = self.getPropertyValue("OutputWorkspace") name_output_ws = name_output_ws + '_#' + str(_time) + 'ts' final_workspace = wks_utility.createFinalWorkspace( final_x_axis, final_y_axis, final_y_error_axis, name_output_ws) self.setProperty('OutputWorkspace', mtd[name_output_ws])
def PyExec(self): import os import numpy import math from reduction.instruments.reflectometer import wks_utility from mantid import mtd #remove all previous workspaces list_mt = mtd.getObjectNames() for _mt in list_mt: if _mt.find('_scaled') != -1: mtd.remove(_mt) if _mt.find('_reflectivity') != -1: mtd.remove(_mt) from mantidsimple import mtd bDebug = False if bDebug: print '====== Running in mode DEBUGGING =======' run_numbers = self.getProperty("RunNumbers") if bDebug: print 'run_numbers (before getSequenceRuns): ' print str(run_numbers) print run_numbers = wks_utility.getSequenceRuns(run_numbers) if bDebug: print 'run_numbers (after getSequenceRuns): ' print str(run_numbers) print for _run in run_numbers: #make sure we are working with integer _run = int(_run) print '********* Working with run: ' + str(_run) + ' *********' #Pick a good workspace name ws_name = "refl%d" % _run ws_event_data = ws_name+"_evt" try: data_file = FileFinder.findRuns("REF_L%d" %_run)[0] if bDebug: print 'DEBUG: full file name is ' + data_file except RuntimeError: msg = "RefLReduction: could not find run %d\n" % _run msg += "Add your data folder to your User Data Directories in the File menu" if bDebug: print 'DEBUG: file name could not be found !' 
raise RuntimeError(msg) if not mtd.workspaceExists(ws_event_data): LoadEventNexus(Filename=data_file, OutputWorkspace=ws_event_data) #retrieve list of metadata mt_run = mtd[ws_event_data].getRun() #run_title run_title = mt_run.getProperty('run_title').value _line = ' Run title: ' + run_title print _line #run_start run_start = mt_run.getProperty('run_start').value _line = ' Run start: ' + run_start print _line #duration duration_value = mt_run.getProperty('duration').value duration_units = mt_run.getProperty('duration').units _line = ' Duration: {0:.2f}'.format(duration_value) _line += ' ' + duration_units print _line #Lambda Requested lambda_request_value = mt_run.getProperty('LambdaRequest').value[0] lambda_request_units = mt_run.getProperty('LambdaRequest').units _line = ' Lambda requested: {0:.2f}'.format(lambda_request_value) _line += ' ' + lambda_request_units print _line #tthd tthd_value = mt_run.getProperty('tthd').value[0] tthd_units = mt_run.getProperty('tthd').units _line = ' tthd: {0:.4f}'.format(tthd_value) _line += ' ' + tthd_units print _line #thi thi_value = mt_run.getProperty('thi').value[0] thi_units = mt_run.getProperty('thi').units _line = ' thi: {0:.4f}'.format(thi_value) _line += ' ' + thi_units print _line #(tthd-thi)/2 _cal = (float(tthd_value)-float(thi_value))/2. 
_line = ' (tthd-thi)/2: {0:.2f}'.format(_cal) _line += ' ' + thi_units print _line #ths ths_value = mt_run.getProperty('ths').value[0] ths_units = mt_run.getProperty('ths').units _line = ' ths: {0:.4f}'.format(ths_value) _line += ' ' + ths_units print _line #s1h s1h_value, s1h_units = wks_utility.getS1h(mtd[ws_event_data]) _line = ' s1h: {0:.4f}'.format(s1h_value) _line += ' ' + s1h_units print _line #s2h s2h_value, s2h_units = wks_utility.getS2h(mtd[ws_event_data]) _line = ' s2h: {0:.4f}'.format(s2h_value) _line += ' ' + s2h_units print _line #s1w s1w_value, s1w_units = wks_utility.getS1w(mtd[ws_event_data]) _line = ' s1w: {0:.4f}'.format(s1w_value) _line += ' ' + s1w_units print _line #s2w s2w_value, s2w_units = wks_utility.getS2w(mtd[ws_event_data]) _line = ' s2w: {0:.4f}'.format(s2w_value) _line += ' ' + s2w_units print _line print '********************************' print