def __load_mask(self, mask_file_name):
    # Check input
    checkdatatypes.check_file_name(mask_file_name, True, False, False, 'Mask XML file')
    if self._event_wksp is None:
        raise RuntimeError('Meta data only workspace {} does not exist'.format(self._event_ws_name))

    # Load mask XML to workspace
    mask_ws_name = os.path.basename(mask_file_name.split('.')[0])
    mask_ws = LoadMask(Instrument='nrsf2', InputFile=mask_file_name,
                       RefWorkspace=self._event_wksp, OutputWorkspace=mask_ws_name)

    # Extract mask out
    # get the Y array from mask workspace: shape = (1048576, 1)
    self.mask_array = mask_ws.extractY().flatten()
    # in Mantid's mask workspace, 1 means masked (delete) and 0 means keep;
    # invert so the array can be applied by multiplication: 0 = delete, 1 = keep
    self.mask_array = 1 - self.mask_array.astype(int)

    # clean up
    DeleteWorkspace(Workspace=mask_ws_name)
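# A minimal sketch (not from the source) of how the inverted mask array built by
# __load_mask is meant to be used: since the array holds 1 for pixels to keep and
# 0 for pixels to discard, an element-wise product zeroes out the masked pixels.
# The names `counts` and `mask_array` are hypothetical stand-ins.
def _apply_mask_sketch(counts, mask_array):
    import numpy as np
    # counts: flattened per-pixel counts, same length as mask_array
    return np.asarray(counts) * mask_array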
def _getMaskWSname(self):
    masking = self.getProperty("Masking").value
    maskWSname = None
    maskFile = None

    # none and workspace are special
    if masking == 'None':
        pass
    elif masking == "Masking Workspace":
        maskWSname = str(self.getProperty("MaskingWorkspace").value)
    # deal with files
    elif masking == 'Custom - xml masking file':
        maskWSname = 'CustomMask'
        maskFile = self.getProperty('MaskingFilename').value
    elif masking == 'Horizontal' or masking == 'Vertical':
        maskWSname = masking + 'Mask'  # append the word 'Mask' for the wksp name
        if not mtd.doesExist(maskWSname):  # only load if it isn't already loaded
            maskFile = '/SNS/SNAP/shared/libs/%s_Mask.xml' % masking

    if maskFile is not None:
        LoadMask(InputFile=maskFile, Instrument='SNAP', OutputWorkspace=maskWSname)

    return maskWSname
def load_file_and_apply(self, filename, ws_name):
    Load(Filename=filename,
         OutputWorkspace=ws_name,
         FilterByTofMin=self.getProperty("FilterByTofMin").value,
         FilterByTofMax=self.getProperty("FilterByTofMax").value)
    if self._load_inst:
        LoadInstrument(Workspace=ws_name,
                       Filename=self.getProperty("LoadInstrument").value,
                       RewriteSpectraMap=False)
    if self._apply_cal:
        ApplyCalibration(Workspace=ws_name,
                         CalibrationTable=self.getProperty("ApplyCalibration").value)
    if self._detcal:
        LoadIsawDetCal(InputWorkspace=ws_name,
                       Filename=self.getProperty("DetCal").value)
    if self._copy_params:
        CopyInstrumentParameters(OutputWorkspace=ws_name,
                                 InputWorkspace=self.getProperty("CopyInstrumentParameters").value)
    if self._masking:
        if not mtd.doesExist('__mask'):
            LoadMask(Instrument=mtd[ws_name].getInstrument().getName(),
                     InputFile=self.getProperty("MaskFile").value,
                     OutputWorkspace='__mask')
        MaskDetectors(Workspace=ws_name, MaskedWorkspace='__mask')

    if self.XMin != Property.EMPTY_DBL and self.XMax != Property.EMPTY_DBL:
        ConvertUnits(InputWorkspace=ws_name,
                     OutputWorkspace=ws_name,
                     Target='Momentum')
        CropWorkspaceForMDNorm(InputWorkspace=ws_name,
                               OutputWorkspace=ws_name,
                               XMin=self.XMin,
                               XMax=self.XMax)
def validate(self):
    ref = LoadMask(Instrument='NOMAD',
                   InputFile="NOM_144974_mask.xml",
                   RefWorkspace='NOM_144974',
                   StoreInADS=False)
    return CompareWorkspaces(Workspace1=self.loaded_ws,
                             Workspace2=ref,
                             CheckMasking=True).Result
def PyExec(self):
    # Facility and database configuration
    config_new_options = {'default.facility': 'SNS',
                          'default.instrument': 'BASIS',
                          'datasearch.searcharchive': 'On'}

    if self.getProperty('DoFluxNormalization').value is True:
        self._flux_normalization_type = \
            self.getProperty('FluxNormalizationType').value

    #
    # Find desired Q-binning
    #
    self._qbins = np.array(self.getProperty('MomentumTransferBins').value)

    #
    # (implement with ContextDecorator after python2 is deprecated)
    #
    remove_temp = self.getProperty('RemoveTemporaryWorkspaces').value
    with pyexec_setup(remove_temp, config_new_options) as self._temps:
        #
        # Load the mask to a temporary workspace
        #
        self._t_mask = LoadMask(Instrument='BASIS',
                                InputFile=self.getProperty('MaskFile').value,
                                OutputWorkspace=tws('mask'))
        #
        # Find the version of the Data Acquisition System
        #
        self._find_das_version()
        #
        # Calculate the valid range of wavelengths for incoming neutrons
        #
        self._calculate_wavelength_band()
        #
        # Load and process vanadium runs, if applicable
        #
        if self.getProperty('VanadiumRuns').value != '':
            self._load_vanadium_runs()
        #
        # Process the sample
        #
        runs = self.getProperty('RunNumbers').value
        _t_sample = self._load_runs(runs, '_t_sample')
        _t_sample = self._apply_corrections_vanadium(_t_sample)
        if self.getProperty('BackgroundRuns').value != '':
            _t_sample, _t_bkg = self._subtract_background(_t_sample)
            if self.getPropertyValue('OutputBackground') != '':
                _t_bkg_angle = self._convert_to_angle(_t_bkg, '_t_bkg_angle')
                self._output_workspace(_t_bkg_angle, 'OutputBackground', suffix='_angle')
                _t_bkg = self._convert_to_q(_t_bkg)
                self._output_workspace(_t_bkg, 'OutputBackground')
        _t_sample_angle = self._convert_to_angle(_t_sample, '_t_sample_angle')
        self._output_workspace(_t_sample_angle, 'OutputWorkspace', suffix='_angle')
        _t_sample = self._convert_to_q(_t_sample)
        self._output_workspace(_t_sample, 'OutputWorkspace')
def test_from_mask_workspace():
    from mantid.simpleapi import LoadMask
    from os import path
    dir_path = path.dirname(path.realpath(__file__))
    mask = LoadMask('HYS', path.join(dir_path, 'HYS_mask.xml'))
    da = scn.from_mantid(mask)
    assert da.data.dtype == sc.DType.bool
    assert da.dims == ['spectrum']
    assert da.variances is None
def _test_impl(self, tmp_dir: Path):
    file_xml_mask = (tmp_dir / "NOMADTEST.xml").resolve()
    file_txt_mask = (tmp_dir / "NOMADTEST.txt").resolve()
    LoadNexusProcessed(Filename='NOM_144974_SingleBin.nxs',
                       OutputWorkspace='NOM_144974')
    NOMADMedianDetectorTest(InputWorkspace='NOM_144974',
                            ConfigurationFile='NOMAD_mask_gen_config.yml',
                            SolidAngleNorm=False,
                            OutputMaskXML=str(file_xml_mask),
                            OutputMaskASCII=str(file_txt_mask))
    self.loaded_ws = LoadMask(Instrument='NOMAD',
                              InputFile=str(file_xml_mask),
                              RefWorkspace='NOM_144974',
                              StoreInADS=False)
def _defaultMask(self, mainWS, wsNames, wsCleanup, report, algorithmLogging):
    """Load instrument specific default mask or return None if not available."""
    option = self.getProperty(common.PROP_DEFAULT_MASK).value
    if option == common.DEFAULT_MASK_OFF:
        return None
    instrument = mainWS.getInstrument()
    instrumentName = instrument.getName()
    if not instrument.hasParameter('Workflow.MaskFile'):
        report.notice('No default mask available for ' + instrumentName + '.')
        return None
    maskFilename = instrument.getStringParameter('Workflow.MaskFile')[0]
    maskFile = os.path.join(mantid.config.getInstrumentDirectory(), 'masks', maskFilename)
    defaultMaskWSName = wsNames.withSuffix('default_mask')
    defaultMaskWS = LoadMask(Instrument=instrumentName,
                             InputFile=maskFile,
                             RefWorkspace=mainWS,
                             OutputWorkspace=defaultMaskWSName,
                             EnableLogging=algorithmLogging)
    report.notice('Default mask loaded from ' + maskFilename)
    return defaultMaskWS
def process_json(json_filename):
    """This will read a json file, process the data and save the calibration.

    Only ``Calibrant`` and ``Groups`` are required.

    An example input showing every possible option is:

    .. code-block:: JSON

      {
        "Calibrant": "12345",
        "Groups": "/path/to/groups.xml",
        "Mask": "/path/to/mask.xml",
        "Instrument": "NOM",
        "Date": "2019_09_04",
        "SampleEnvironment": "shifter",
        "PreviousCalibration": "/path/to/cal.h5",
        "CalDirectory": "/path/to/output_directory",
        "CrossCorrelate": {"Step": 0.001,
                           "DReference": 1.5,
                           "Xmin": 1.0,
                           "Xmax": 3.0,
                           "MaxDSpaceShift": 0.25},
        "PDCalibration": {"PeakPositions": [1, 2, 3],
                          "TofBinning": [300, 0.001, 16666],
                          "PeakFunction": "Gaussian",
                          "PeakWindow": 0.1,
                          "PeakWidthPercent": 0.001}
      }
    """
    with open(json_filename) as json_file:
        args = json.load(json_file)

    calibrant_file = args.get('CalibrantFile', None)
    if calibrant_file is None:
        calibrant = args['Calibrant']
    groups = args['Groups']
    out_groups_by = args.get('OutputGroupsBy', 'Group')
    sample_env = args.get('SampleEnvironment', 'UnknownSampleEnvironment')
    mask = args.get('Mask')
    instrument = args.get('Instrument', 'NOM')
    cc_kwargs = args.get('CrossCorrelate', {})
    pdcal_kwargs = args.get('PDCalibration', {})
    previous_calibration = args.get('PreviousCalibration')

    date = str(args.get('Date', datetime.datetime.now().strftime('%Y_%m_%d')))
    caldirectory = str(args.get('CalDirectory', os.path.abspath('.')))

    if calibrant_file is not None:
        ws = Load(calibrant_file)
        calibrant = ws.getRun().getProperty('run_number').value
    else:
        filename = f'{instrument}_{calibrant}'
        ws = Load(filename)

    calfilename = f'{caldirectory}/{instrument}_{calibrant}_{date}_{sample_env}.h5'
    logger.notice(f'going to create calibration file: {calfilename}')

    groups = LoadDetectorsGroupingFile(groups, InputWorkspace=ws)

    if mask:
        mask = LoadMask(instrument, mask)
        MaskDetectors(ws, MaskedWorkspace=mask)

    if previous_calibration:
        previous_calibration = LoadDiffCal(previous_calibration,
                                           MakeGroupingWorkspace=False,
                                           MakeMaskWorkspace=False)

    diffcal = do_group_calibration(ws,
                                   groups,
                                   previous_calibration,
                                   cc_kwargs=cc_kwargs,
                                   pdcal_kwargs=pdcal_kwargs)
    mask = mtd['group_calibration_pd_diffcal_mask']

    CreateGroupingWorkspace(InputWorkspace=ws,
                            GroupDetectorsBy=out_groups_by,
                            OutputWorkspace='out_groups')

    SaveDiffCal(CalibrationWorkspace=diffcal,
                MaskWorkspace=mask,
                GroupingWorkspace=mtd['out_groups'],
                Filename=calfilename)
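# Minimal usage sketch (assumed, not part of the source): run the calibration pipeline
# defined above against a JSON configuration shaped like the docstring example.
# The file path below is hypothetical.
if __name__ == '__main__':
    process_json('/path/to/calibration_config.json')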
def PyExec(self):
    _background = bool(self.getProperty("Background").value)
    _load_inst = bool(self.getProperty("LoadInstrument").value)
    _norm_current = bool(self.getProperty("NormaliseByCurrent").value)
    _detcal = bool(self.getProperty("DetCal").value)
    _masking = bool(self.getProperty("MaskFile").value)
    _grouping = bool(self.getProperty("GroupingFile").value)
    _anvred = bool(self.getProperty("SphericalAbsorptionCorrection").value)
    _SA_name = self.getPropertyValue("SolidAngleOutputWorkspace")
    _Flux_name = self.getPropertyValue("FluxOutputWorkspace")

    XMin = self.getProperty("MomentumMin").value
    XMax = self.getProperty("MomentumMax").value
    rebin_param = ','.join([str(XMin), str(XMax), str(XMax)])

    Load(Filename=self.getPropertyValue("Filename"),
         OutputWorkspace='__van',
         FilterByTofMin=self.getProperty("FilterByTofMin").value,
         FilterByTofMax=self.getProperty("FilterByTofMax").value)

    if _norm_current:
        NormaliseByCurrent(InputWorkspace='__van', OutputWorkspace='__van')

    if _background:
        Load(Filename=self.getProperty("Background").value,
             OutputWorkspace='__bkg',
             FilterByTofMin=self.getProperty("FilterByTofMin").value,
             FilterByTofMax=self.getProperty("FilterByTofMax").value)
        if _norm_current:
            NormaliseByCurrent(InputWorkspace='__bkg', OutputWorkspace='__bkg')
        else:
            pc_van = mtd['__van'].run().getProtonCharge()
            pc_bkg = mtd['__bkg'].run().getProtonCharge()
            mtd['__bkg'] *= pc_van / pc_bkg
        mtd['__bkg'] *= self.getProperty('BackgroundScale').value
        Minus(LHSWorkspace='__van', RHSWorkspace='__bkg', OutputWorkspace='__van')
        DeleteWorkspace('__bkg')

    if _load_inst:
        LoadInstrument(Workspace='__van',
                       Filename=self.getProperty("LoadInstrument").value,
                       RewriteSpectraMap=False)
    if _detcal:
        LoadIsawDetCal(InputWorkspace='__van',
                       Filename=self.getProperty("DetCal").value)

    if _masking:
        LoadMask(Instrument=mtd['__van'].getInstrument().getName(),
                 InputFile=self.getProperty("MaskFile").value,
                 OutputWorkspace='__mask')
        MaskDetectors(Workspace='__van', MaskedWorkspace='__mask')
        DeleteWorkspace('__mask')

    ConvertUnits(InputWorkspace='__van', OutputWorkspace='__van', Target='Momentum')
    Rebin(InputWorkspace='__van', OutputWorkspace='__van', Params=rebin_param)
    CropWorkspace(InputWorkspace='__van', OutputWorkspace='__van', XMin=XMin, XMax=XMax)

    if _anvred:
        AnvredCorrection(InputWorkspace='__van',
                         OutputWorkspace='__van',
                         LinearScatteringCoef=self.getProperty("LinearScatteringCoef").value,
                         LinearAbsorptionCoef=self.getProperty("LinearAbsorptionCoef").value,
                         Radius=self.getProperty("Radius").value,
                         OnlySphericalAbsorption='1',
                         PowerLambda='0')

    # Create solid angle
    Rebin(InputWorkspace='__van',
          OutputWorkspace=_SA_name,
          Params=rebin_param,
          PreserveEvents=False)

    # Create flux
    if _grouping:
        GroupDetectors(InputWorkspace='__van',
                       OutputWorkspace='__van',
                       MapFile=self.getProperty("GroupingFile").value)
    else:
        SumSpectra(InputWorkspace='__van', OutputWorkspace='__van')

    Rebin(InputWorkspace='__van', OutputWorkspace='__van', Params=rebin_param)
    flux = mtd['__van']
    for i in range(flux.getNumberHistograms()):
        el = flux.getSpectrum(i)
        if flux.readY(i)[0] > 0:
            el.divide(flux.readY(i)[0], flux.readE(i)[0])
    SortEvents(InputWorkspace='__van', SortBy="X Value")
    IntegrateFlux(InputWorkspace='__van', OutputWorkspace=_Flux_name, NPoints=10000)
    DeleteWorkspace('__van')

    self.setProperty("SolidAngleOutputWorkspace", mtd[_SA_name])
    self.setProperty("FluxOutputWorkspace", mtd[_Flux_name])
def int3samples(runs, name, masks, binning='0.5, 0.05, 8.0'):
    """
    Finds the polarisation versus wavelength for a set of detector tubes.

    Parameters
    ----------
    runs: list of RunData objects
      The runs whose polarisation we are interested in.
    name: string
      The name of this set of runs
    masks: list of string
      The file names of the masks for the sequential tubes that are being
      used for the SEMSANS measurements.
    binning: string
      The binning values to use for the wavelength bins. The default value is
      '0.5, 0.05, 8.0'
    """
    for tube, _ in enumerate(masks):
        for i in [1, 2]:
            final_state = "{}_{}_{}".format(name, tube, i)
            if final_state in mtd.getObjectNames():
                DeleteWorkspace(final_state)

    for rnum in runs:
        w1 = Load(BASE.format(rnum.number), LoadMonitors=True)
        w1mon = ExtractSingleSpectrum('w1_monitors', 0)
        w1 = ConvertUnits('w1', 'Wavelength', AlignBins=1)
        w1mon = ConvertUnits(w1mon, 'Wavelength')
        w1 = Rebin(w1, binning, PreserveEvents=False)
        w1mon = Rebin(w1mon, binning)
        w1 = w1 / w1mon
        for tube, mask in enumerate(masks):
            Mask_Tube = LoadMask('LARMOR', mask)
            w1temp = CloneWorkspace(w1)
            MaskDetectors(w1temp, MaskedWorkspace="Mask_Tube")
            Tube_Sum = SumSpectra(w1temp)
            for i in [1, 2]:
                final_state = "{}_{}_{}".format(name, tube, i)
                if final_state in mtd.getObjectNames():
                    mtd[final_state] += mtd["Tube_Sum_{}".format(i)]
                else:
                    mtd[final_state] = mtd["Tube_Sum_{}".format(i)]

    x = mtd["{}_0_1".format(name)].extractX()[0]
    dx = (x[1:] + x[:-1]) / 2
    pols = []

    for run in runs:
        he_stat = he3_stats(run)
        start = (run.start - he_stat.dt).seconds / 3600 / he_stat.t1
        end = (run.end - he_stat.dt).seconds / 3600 / he_stat.t1
        for time in np.linspace(start, end, 10):
            temp = he3pol(he_stat.scale, time)(dx)
            pols.append(temp)
    wpol = CreateWorkspace(x,
                           np.mean(pols, axis=0),
                           # and the blank
                           UnitX="Wavelength",
                           YUnitLabel="Counts")

    for tube, _ in enumerate(masks):
        up = mtd["{}_{}_2".format(name, tube)]
        dn = mtd["{}_{}_1".format(name, tube)]
        pol = (up - dn) / (up + dn)
        pol /= wpol
        DeleteWorkspaces(["{}_{}_{}".format(name, tube, i) for i in range(1, 3)])
        RenameWorkspace("pol", OutputWorkspace="{}_{}".format(name, tube))

    DeleteWorkspaces(["Tube_Sum_1", "Tube_Sum_2"])
    GroupWorkspaces(["{}_{}".format(name, tube)
                     for tube, _ in enumerate(masks)
                     for i in range(1, 3)],
                    OutputWorkspace=str(name))
def PyExec(self):
    # Facility and database configuration
    config_new_options = {'default.facility': 'SNS',
                          'default.instrument': 'BASIS',
                          'datasearch.searcharchive': 'On'}

    # Find valid incoming momentum range
    self._lambda_range = np.array(self.getProperty('LambdaRange').value)
    self._momentum_range = np.sort(2 * np.pi / self._lambda_range)

    # (implement with ContextDecorator after python2 is deprecated)
    with pyexec_setup(config_new_options) as self._temps:
        # Load the mask to a temporary workspace
        self._t_mask = LoadMask(Instrument='BASIS',
                                InputFile=self.getProperty('MaskFile').value,
                                OutputWorkspace='_t_mask')

        # Pre-process the background runs
        if self.getProperty('BackgroundRuns').value:
            bkg_run_numbers = self._getRuns(
                self.getProperty('BackgroundRuns').value, doIndiv=True)
            bkg_run_numbers = \
                list(itertools.chain.from_iterable(bkg_run_numbers))
            background_reporter = Progress(self, start=0.0, end=1.0,
                                           nreports=len(bkg_run_numbers))
            for i, run in enumerate(bkg_run_numbers):
                if self._bkg is None:
                    self._bkg = self._mask_t0_crop(run, '_bkg')
                    self._temps.workspaces.append('_bkg')
                else:
                    _ws = self._mask_t0_crop(run, '_ws')
                    self._bkg += _ws
                    if '_ws' not in self._temps.workspaces:
                        self._temps.workspaces.append('_ws')
                message = 'Pre-processing background: {} of {}'.\
                    format(i+1, len(bkg_run_numbers))
                background_reporter.report(message)
            SetGoniometer(self._bkg, Axis0='0,0,1,0,1')
            self._bkg_scale = self.getProperty('BackgroundScale').value
            background_reporter.report(len(bkg_run_numbers), 'Done')

        # Pre-process the vanadium run(s) by removing the delayed
        # emission time from the moderator and then saving to file(s)
        if self.getProperty('VanadiumRuns').value:
            run_numbers = self._getRuns(
                self.getProperty('VanadiumRuns').value, doIndiv=True)
            run_numbers = list(itertools.chain.from_iterable(run_numbers))
            vanadium_reporter = Progress(self, start=0.0, end=1.0,
                                         nreports=len(run_numbers))
            self._vanadium_files = list()
            for i, run in enumerate(run_numbers):
                self._vanadium_files.append(self._save_t0(run))
                message = 'Pre-processing vanadium: {} of {}'.\
                    format(i+1, len(run_numbers))
                vanadium_reporter.report(message)
            vanadium_reporter.report(len(run_numbers), 'Done')

        # Determination of single crystal diffraction
        self._determine_single_crystal_diffraction()
def PyExec(self):
    # remove possible old temp workspaces
    [DeleteWorkspace(ws) for ws in self.temp_workspace_list if mtd.doesExist(ws)]

    _background = bool(self.getProperty("Background").value)
    _load_inst = bool(self.getProperty("LoadInstrument").value)
    _detcal = bool(self.getProperty("DetCal").value)
    _masking = bool(self.getProperty("MaskFile").value)
    _outWS_name = self.getPropertyValue("OutputWorkspace")

    UBList = self._generate_UBList()

    dim0_min, dim0_max, dim0_bins = self.getProperty('BinningDim0').value
    dim1_min, dim1_max, dim1_bins = self.getProperty('BinningDim1').value
    dim2_min, dim2_max, dim2_bins = self.getProperty('BinningDim2').value
    MinValues = "{},{},{}".format(dim0_min, dim1_min, dim2_min)
    MaxValues = "{},{},{}".format(dim0_max, dim1_max, dim2_max)
    AlignedDim0 = ",{},{},{}".format(dim0_min, dim0_max, int(dim0_bins))
    AlignedDim1 = ",{},{},{}".format(dim1_min, dim1_max, int(dim1_bins))
    AlignedDim2 = ",{},{},{}".format(dim2_min, dim2_max, int(dim2_bins))

    LoadNexus(Filename=self.getProperty("SolidAngle").value,
              OutputWorkspace='__sa')
    LoadNexus(Filename=self.getProperty("Flux").value,
              OutputWorkspace='__flux')

    if _masking:
        LoadMask(Instrument=mtd['__sa'].getInstrument().getName(),
                 InputFile=self.getProperty("MaskFile").value,
                 OutputWorkspace='__mask')
        MaskDetectors(Workspace='__sa', MaskedWorkspace='__mask')
        DeleteWorkspace('__mask')

    XMin = mtd['__sa'].getXDimension().getMinimum()
    XMax = mtd['__sa'].getXDimension().getMaximum()

    if _background:
        Load(Filename=self.getProperty("Background").value,
             OutputWorkspace='__bkg',
             FilterByTofMin=self.getProperty("FilterByTofMin").value,
             FilterByTofMax=self.getProperty("FilterByTofMax").value)
        if _load_inst:
            LoadInstrument(Workspace='__bkg',
                           Filename=self.getProperty("LoadInstrument").value,
                           RewriteSpectraMap=False)
        if _detcal:
            LoadIsawDetCal(InputWorkspace='__bkg',
                           Filename=self.getProperty("DetCal").value)
        MaskDetectors(Workspace='__bkg', MaskedWorkspace='__sa')
        ConvertUnits(InputWorkspace='__bkg',
                     OutputWorkspace='__bkg',
                     Target='Momentum')
        CropWorkspace(InputWorkspace='__bkg',
                      OutputWorkspace='__bkg',
                      XMin=XMin,
                      XMax=XMax)

    progress = Progress(self, 0.0, 1.0,
                        len(UBList) * len(self.getProperty("Filename").value))

    for run in self.getProperty("Filename").value:
        logger.notice("Working on " + run)

        Load(Filename=run,
             OutputWorkspace='__run',
             FilterByTofMin=self.getProperty("FilterByTofMin").value,
             FilterByTofMax=self.getProperty("FilterByTofMax").value)
        if _load_inst:
            LoadInstrument(Workspace='__run',
                           Filename=self.getProperty("LoadInstrument").value,
                           RewriteSpectraMap=False)
        if _detcal:
            LoadIsawDetCal(InputWorkspace='__run',
                           Filename=self.getProperty("DetCal").value)
        MaskDetectors(Workspace='__run', MaskedWorkspace='__sa')
        ConvertUnits(InputWorkspace='__run',
                     OutputWorkspace='__run',
                     Target='Momentum')
        CropWorkspace(InputWorkspace='__run',
                      OutputWorkspace='__run',
                      XMin=XMin,
                      XMax=XMax)

        if self.getProperty('SetGoniometer').value:
            SetGoniometer(Workspace='__run',
                          Goniometers=self.getProperty('Goniometers').value,
                          Axis0=self.getProperty('Axis0').value,
                          Axis1=self.getProperty('Axis1').value,
                          Axis2=self.getProperty('Axis2').value)

        # Set background Goniometer to be the same as data
        if _background:
            mtd['__bkg'].run().getGoniometer().setR(
                mtd['__run'].run().getGoniometer().getR())

        for ub in UBList:
            SetUB(Workspace='__run', UB=ub)
            ConvertToMD(InputWorkspace='__run',
                        OutputWorkspace='__md',
                        QDimensions='Q3D',
                        dEAnalysisMode='Elastic',
                        Q3DFrames='HKL',
                        QConversionScales='HKL',
                        Uproj=self.getProperty('Uproj').value,
                        Vproj=self.getProperty('Vproj').value,
                        Wproj=self.getProperty('Wproj').value,
                        MinValues=MinValues,
                        MaxValues=MaxValues)
            MDNormSCD(InputWorkspace=mtd['__md'],
                      FluxWorkspace='__flux',
                      SolidAngleWorkspace='__sa',
                      OutputWorkspace='__data',
                      SkipSafetyCheck=True,
                      TemporaryDataWorkspace='__data' if mtd.doesExist('__data') else None,
                      OutputNormalizationWorkspace='__norm',
                      TemporaryNormalizationWorkspace='__norm' if mtd.doesExist('__norm') else None,
                      AlignedDim0=mtd['__md'].getDimension(0).name + AlignedDim0,
                      AlignedDim1=mtd['__md'].getDimension(1).name + AlignedDim1,
                      AlignedDim2=mtd['__md'].getDimension(2).name + AlignedDim2)
            DeleteWorkspace('__md')

            if _background:
                SetUB(Workspace='__bkg', UB=ub)
                ConvertToMD(InputWorkspace='__bkg',
                            OutputWorkspace='__bkg_md',
                            QDimensions='Q3D',
                            dEAnalysisMode='Elastic',
                            Q3DFrames='HKL',
                            QConversionScales='HKL',
                            Uproj=self.getProperty('Uproj').value,
                            Vproj=self.getProperty('Vproj').value,
                            Wproj=self.getProperty('Wproj').value,
                            MinValues=MinValues,
                            MaxValues=MaxValues)
                MDNormSCD(InputWorkspace='__bkg_md',
                          FluxWorkspace='__flux',
                          SolidAngleWorkspace='__sa',
                          SkipSafetyCheck=True,
                          OutputWorkspace='__bkg_data',
                          TemporaryDataWorkspace='__bkg_data' if mtd.doesExist('__bkg_data') else None,
                          OutputNormalizationWorkspace='__bkg_norm',
                          TemporaryNormalizationWorkspace='__bkg_norm' if mtd.doesExist('__bkg_norm') else None,
                          AlignedDim0=mtd['__bkg_md'].getDimension(0).name + AlignedDim0,
                          AlignedDim1=mtd['__bkg_md'].getDimension(1).name + AlignedDim1,
                          AlignedDim2=mtd['__bkg_md'].getDimension(2).name + AlignedDim2)
                DeleteWorkspace('__bkg_md')

            progress.report()
        DeleteWorkspace('__run')

    if _background:
        # outWS = data / norm - bkg_data / bkg_norm * BackgroundScale
        DivideMD(LHSWorkspace='__data',
                 RHSWorkspace='__norm',
                 OutputWorkspace=_outWS_name + '_normalizedData')
        DivideMD(LHSWorkspace='__bkg_data',
                 RHSWorkspace='__bkg_norm',
                 OutputWorkspace=_outWS_name + '_normalizedBackground')
        CreateSingleValuedWorkspace(OutputWorkspace='__scale',
                                    DataValue=self.getProperty('BackgroundScale').value)
        MultiplyMD(LHSWorkspace=_outWS_name + '_normalizedBackground',
                   RHSWorkspace='__scale',
                   OutputWorkspace='__scaled_background')
        DeleteWorkspace('__scale')
        MinusMD(LHSWorkspace=_outWS_name + '_normalizedData',
                RHSWorkspace='__scaled_background',
                OutputWorkspace=_outWS_name)
        if self.getProperty('KeepTemporaryWorkspaces').value:
            RenameWorkspaces(InputWorkspaces=['__data', '__norm',
                                              '__bkg_data', '__bkg_norm'],
                             WorkspaceNames=[_outWS_name + '_data',
                                             _outWS_name + '_normalization',
                                             _outWS_name + '_background_data',
                                             _outWS_name + '_background_normalization'])
    else:
        # outWS = data / norm
        DivideMD(LHSWorkspace='__data',
                 RHSWorkspace='__norm',
                 OutputWorkspace=_outWS_name)
        if self.getProperty('KeepTemporaryWorkspaces').value:
            RenameWorkspaces(InputWorkspaces=['__data', '__norm'],
                             WorkspaceNames=[_outWS_name + '_data',
                                             _outWS_name + '_normalization'])

    self.setProperty("OutputWorkspace", mtd[_outWS_name])

    # remove temp workspaces
    [DeleteWorkspace(ws) for ws in self.temp_workspace_list if mtd.doesExist(ws)]
def PyExec(self):
    # remove possible old temp workspaces
    [DeleteWorkspace(ws) for ws in self.temp_workspace_list if mtd.doesExist(ws)]

    _background = bool(self.getProperty("Background").value)
    self._load_inst = bool(self.getProperty("LoadInstrument").value)
    self._apply_cal = bool(self.getProperty("ApplyCalibration").value)
    self._detcal = bool(self.getProperty("DetCal").value)
    self._copy_params = bool(self.getProperty("CopyInstrumentParameters").value)
    _masking = bool(self.getProperty("MaskFile").value)
    _outWS_name = self.getPropertyValue("OutputWorkspace")
    _UB = self.getProperty("UBMatrix").value
    if len(_UB) == 1:
        _UB = np.tile(_UB, len(self.getProperty("Filename").value))
    _offsets = self.getProperty("OmegaOffset").value
    if len(_offsets) == 0:
        _offsets = np.zeros(len(self.getProperty("Filename").value))

    if self.getProperty("ReuseSAFlux").value and mtd.doesExist('__sa') and mtd.doesExist('__flux'):
        logger.notice(
            "Reusing previously loaded SolidAngle and Flux workspaces. "
            "Set ReuseSAFlux to False if new files are selected or you change the momentum range.")
    else:
        logger.notice("Loading SolidAngle and Flux from file")
        LoadNexus(Filename=self.getProperty("SolidAngle").value,
                  OutputWorkspace='__sa')
        LoadNexus(Filename=self.getProperty("Flux").value,
                  OutputWorkspace='__flux')

    if _masking:
        LoadMask(Instrument=mtd['__sa'].getInstrument().getName(),
                 InputFile=self.getProperty("MaskFile").value,
                 OutputWorkspace='__mask')
        MaskDetectors(Workspace='__sa', MaskedWorkspace='__mask')
        DeleteWorkspace('__mask')

    self.XMin = mtd['__sa'].getXDimension().getMinimum()
    self.XMax = mtd['__sa'].getXDimension().getMaximum()

    newXMin = self.getProperty("MomentumMin").value
    newXMax = self.getProperty("MomentumMax").value
    if newXMin != Property.EMPTY_DBL or newXMax != Property.EMPTY_DBL:
        if newXMin != Property.EMPTY_DBL:
            self.XMin = max(self.XMin, newXMin)
        if newXMax != Property.EMPTY_DBL:
            self.XMax = min(self.XMax, newXMax)
        logger.notice("Using momentum range {} to {} A^-1".format(self.XMin, self.XMax))
        CropWorkspace(InputWorkspace='__flux',
                      OutputWorkspace='__flux',
                      XMin=self.XMin,
                      XMax=self.XMax)
        for spectrumNumber in range(mtd['__flux'].getNumberHistograms()):
            Y = mtd['__flux'].readY(spectrumNumber)
            mtd['__flux'].setY(spectrumNumber, (Y - Y.min()) / (Y.max() - Y.min()))

    MinValues = [-self.XMax * 2] * 3
    MaxValues = [self.XMax * 2] * 3

    if _background:
        self.load_file_and_apply(self.getProperty("Background").value, '__bkg', 0)

    progress = Progress(self, 0.0, 1.0, len(self.getProperty("Filename").value))

    for n, run in enumerate(self.getProperty("Filename").value):
        logger.notice("Working on " + run)

        self.load_file_and_apply(run, '__run', _offsets[n])
        LoadIsawUB('__run', _UB[n])

        ConvertToMD(InputWorkspace='__run',
                    OutputWorkspace='__md',
                    QDimensions='Q3D',
                    dEAnalysisMode='Elastic',
                    Q3DFrames='Q_sample',
                    MinValues=MinValues,
                    MaxValues=MaxValues)
        RecalculateTrajectoriesExtents(InputWorkspace='__md',
                                       OutputWorkspace='__md')
        MDNorm(InputWorkspace='__md',
               FluxWorkspace='__flux',
               SolidAngleWorkspace='__sa',
               OutputDataWorkspace='__data',
               TemporaryDataWorkspace='__data' if mtd.doesExist('__data') else None,
               OutputNormalizationWorkspace='__norm',
               TemporaryNormalizationWorkspace='__norm' if mtd.doesExist('__norm') else None,
               OutputWorkspace=_outWS_name,
               QDimension0=self.getProperty('QDimension0').value,
               QDimension1=self.getProperty('QDimension1').value,
               QDimension2=self.getProperty('QDimension2').value,
               Dimension0Binning=self.getProperty('Dimension0Binning').value,
               Dimension1Binning=self.getProperty('Dimension1Binning').value,
               Dimension2Binning=self.getProperty('Dimension2Binning').value,
               SymmetryOperations=self.getProperty('SymmetryOperations').value)
        DeleteWorkspace('__md')

        if _background:
            # Set background Goniometer and UB to be the same as data
            CopySample(InputWorkspace='__run',
                       OutputWorkspace='__bkg',
                       CopyName=False,
                       CopyMaterial=False,
                       CopyEnvironment=False,
                       CopyShape=False,
                       CopyLattice=True)
            mtd['__bkg'].run().getGoniometer().setR(
                mtd['__run'].run().getGoniometer().getR())

            ConvertToMD(InputWorkspace='__bkg',
                        OutputWorkspace='__bkg_md',
                        QDimensions='Q3D',
                        dEAnalysisMode='Elastic',
                        Q3DFrames='Q_sample',
                        MinValues=MinValues,
                        MaxValues=MaxValues)
            RecalculateTrajectoriesExtents(InputWorkspace='__bkg_md',
                                           OutputWorkspace='__bkg_md')
            MDNorm(InputWorkspace='__bkg_md',
                   FluxWorkspace='__flux',
                   SolidAngleWorkspace='__sa',
                   OutputDataWorkspace='__bkg_data',
                   TemporaryDataWorkspace='__bkg_data' if mtd.doesExist('__bkg_data') else None,
                   OutputNormalizationWorkspace='__bkg_norm',
                   TemporaryNormalizationWorkspace='__bkg_norm' if mtd.doesExist('__bkg_norm') else None,
                   OutputWorkspace='__normalizedBackground',
                   QDimension0=self.getProperty('QDimension0').value,
                   QDimension1=self.getProperty('QDimension1').value,
                   QDimension2=self.getProperty('QDimension2').value,
                   Dimension0Binning=self.getProperty('Dimension0Binning').value,
                   Dimension1Binning=self.getProperty('Dimension1Binning').value,
                   Dimension2Binning=self.getProperty('Dimension2Binning').value,
                   SymmetryOperations=self.getProperty('SymmetryOperations').value)
            DeleteWorkspace('__bkg_md')

        progress.report()
        DeleteWorkspace('__run')

    if _background:
        # outWS = data / norm - bkg_data / bkg_norm * BackgroundScale
        CreateSingleValuedWorkspace(OutputWorkspace='__scale',
                                    DataValue=self.getProperty('BackgroundScale').value)
        MultiplyMD(LHSWorkspace='__normalizedBackground',
                   RHSWorkspace='__scale',
                   OutputWorkspace='__normalizedBackground')
        DeleteWorkspace('__scale')
        MinusMD(LHSWorkspace=_outWS_name,
                RHSWorkspace='__normalizedBackground',
                OutputWorkspace=_outWS_name)
        if self.getProperty('KeepTemporaryWorkspaces').value:
            RenameWorkspaces(InputWorkspaces=['__data', '__norm',
                                              '__bkg_data', '__bkg_norm'],
                             WorkspaceNames=[_outWS_name + '_data',
                                             _outWS_name + '_normalization',
                                             _outWS_name + '_background_data',
                                             _outWS_name + '_background_normalization'])
    else:
        if self.getProperty('KeepTemporaryWorkspaces').value:
            RenameWorkspaces(InputWorkspaces=['__data', '__norm'],
                             WorkspaceNames=[_outWS_name + '_data',
                                             _outWS_name + '_normalization'])

    self.setProperty("OutputWorkspace", mtd[_outWS_name])

    # remove temp workspaces
    [DeleteWorkspace(ws) for ws in self.temp_workspace_list if mtd.doesExist(ws)]
def PyExec(self):
    _load_inst = bool(self.getProperty("LoadInstrument").value)
    _detcal = bool(self.getProperty("DetCal").value)
    _masking = bool(self.getProperty("MaskFile").value)
    _outWS_name = self.getPropertyValue("OutputWorkspace")
    _UB = bool(self.getProperty("UBMatrix").value)

    MinValues = self.getProperty("MinValues").value
    MaxValues = self.getProperty("MaxValues").value

    if self.getProperty("OverwriteExisting").value:
        if mtd.doesExist(_outWS_name):
            DeleteWorkspace(_outWS_name)

    progress = Progress(self, 0.0, 1.0, len(self.getProperty("Filename").value))

    for run in self.getProperty("Filename").value:
        logger.notice("Working on " + run)

        Load(Filename=run,
             OutputWorkspace='__run',
             FilterByTofMin=self.getProperty("FilterByTofMin").value,
             FilterByTofMax=self.getProperty("FilterByTofMax").value,
             FilterByTimeStop=self.getProperty("FilterByTimeStop").value)

        if _load_inst:
            LoadInstrument(Workspace='__run',
                           Filename=self.getProperty("LoadInstrument").value,
                           RewriteSpectraMap=False)

        if _detcal:
            LoadIsawDetCal(InputWorkspace='__run',
                           Filename=self.getProperty("DetCal").value)

        if _masking:
            if not mtd.doesExist('__mask'):
                LoadMask(Instrument=mtd['__run'].getInstrument().getName(),
                         InputFile=self.getProperty("MaskFile").value,
                         OutputWorkspace='__mask')
            MaskDetectors(Workspace='__run', MaskedWorkspace='__mask')

        if self.getProperty('SetGoniometer').value:
            SetGoniometer(Workspace='__run',
                          Goniometers=self.getProperty('Goniometers').value,
                          Axis0=self.getProperty('Axis0').value,
                          Axis1=self.getProperty('Axis1').value,
                          Axis2=self.getProperty('Axis2').value)

        if _UB:
            LoadIsawUB(InputWorkspace='__run',
                       Filename=self.getProperty("UBMatrix").value)
            if len(MinValues) == 0 or len(MaxValues) == 0:
                MinValues, MaxValues = ConvertToMDMinMaxGlobal('__run',
                                                               dEAnalysisMode='Elastic',
                                                               Q3DFrames='HKL',
                                                               QDimensions='Q3D')
            ConvertToMD(InputWorkspace='__run',
                        OutputWorkspace=_outWS_name,
                        QDimensions='Q3D',
                        dEAnalysisMode='Elastic',
                        Q3DFrames='HKL',
                        QConversionScales='HKL',
                        Uproj=self.getProperty('Uproj').value,
                        Vproj=self.getProperty('Vproj').value,
                        Wproj=self.getProperty('Wproj').value,
                        MinValues=MinValues,
                        MaxValues=MaxValues,
                        SplitInto=self.getProperty('SplitInto').value,
                        SplitThreshold=self.getProperty('SplitThreshold').value,
                        MaxRecursionDepth=self.getProperty('MaxRecursionDepth').value,
                        OverwriteExisting=False)
        else:
            if len(MinValues) == 0 or len(MaxValues) == 0:
                MinValues, MaxValues = ConvertToMDMinMaxGlobal('__run',
                                                               dEAnalysisMode='Elastic',
                                                               Q3DFrames='Q',
                                                               QDimensions='Q3D')
            ConvertToMD(InputWorkspace='__run',
                        OutputWorkspace=_outWS_name,
                        QDimensions='Q3D',
                        dEAnalysisMode='Elastic',
                        Q3DFrames='Q_sample',
                        Uproj=self.getProperty('Uproj').value,
                        Vproj=self.getProperty('Vproj').value,
                        Wproj=self.getProperty('Wproj').value,
                        MinValues=MinValues,
                        MaxValues=MaxValues,
                        SplitInto=self.getProperty('SplitInto').value,
                        SplitThreshold=self.getProperty('SplitThreshold').value,
                        MaxRecursionDepth=self.getProperty('MaxRecursionDepth').value,
                        OverwriteExisting=False)
        DeleteWorkspace('__run')
        progress.report()

    if mtd.doesExist('__mask'):
        DeleteWorkspace('__mask')

    self.setProperty("OutputWorkspace", mtd[_outWS_name])