def PyExec(self):
    in_ws = self.getProperty("InputWorkspace").value
    patch_ws = self.getProperty("PatchWorkspace").value
    component_name = self.getProperty("ComponentName").value
    component = self.__get_component_to_patch(in_ws, component_name)
    number_of_tubes = component.nelements()

    for tube_idx in range(number_of_tubes):
        if component[0].nelements() <= 1:
            # Handles EQSANS
            tube = component[tube_idx][0]
        else:
            # Handles Biosans/GPSANS
            tube = component[tube_idx]
        self.__patch_workspace(tube, in_ws, patch_ws)

    api.ClearMaskFlag(Workspace=in_ws, ComponentName=component_name)
def PyExec(self):
    config['default.facility'] = "SNS"
    config['default.instrument'] = self._long_inst
    self._doIndiv = self.getProperty("DoIndividual").value
    self._etBins = self.getProperty("EnergyBins").value / MICROEV_TO_MILLIEV
    self._qBins = self.getProperty("MomentumTransferBins").value
    self._noMonNorm = self.getProperty("NoMonitorNorm").value
    self._maskFile = self.getProperty("MaskFile").value
    self._groupDetOpt = self.getProperty("GroupDetectors").value
    self._normalizeToFirst = self.getProperty("NormalizeToFirst").value
    self._normalizeToVanadium = self.getProperty("GroupDetectors").value
    self._doNorm = self.getProperty("DivideByVanadium").value

    datasearch = config["datasearch.searcharchive"]
    if datasearch != "On":
        config["datasearch.searcharchive"] = "On"

    # Handle masking file override if necessary
    self._overrideMask = bool(self._maskFile)
    if not self._overrideMask:
        config.appendDataSearchDir(DEFAULT_MASK_GROUP_DIR)
        self._maskFile = DEFAULT_MASK_FILE

    api.LoadMask(Instrument='BASIS',
                 OutputWorkspace='BASIS_MASK',
                 InputFile=self._maskFile)

    # Work around length issue
    _dMask = api.ExtractMask('BASIS_MASK')
    self._dMask = _dMask[1]
    api.DeleteWorkspace(_dMask[0])

    ############################
    ##  Process the Vanadium  ##
    ############################

    norm_runs = self.getProperty("NormRunNumbers").value
    if self._doNorm and bool(norm_runs):
        if ";" in norm_runs:
            raise SyntaxError("Normalization does not support run groups")
        self._doNorm = self.getProperty("NormalizationType").value
        self.log().information("Divide by Vanadium with normalization " +
                               self._doNorm)

        # The following steps are common to all types of Vanadium normalization

        # norm_runs encompasses a single set, thus _getRuns returns
        # a list of only one item
        norm_set = self._getRuns(norm_runs, doIndiv=False)[0]
        self._normWs = self._sum_and_calibrate(norm_set, extra_extension="_norm")

        # This rebin integrates counts onto a histogram of a single bin
        if self._doNorm == "by detector ID":
            normRange = self.getProperty("NormWavelengthRange").value
            self._normRange = [
                normRange[0], normRange[1] - normRange[0], normRange[1]
            ]
            api.Rebin(InputWorkspace=self._normWs,
                      OutputWorkspace=self._normWs,
                      Params=self._normRange)

        # FindDetectorsOutsideLimits to be substituted by MedianDetectorTest
        api.FindDetectorsOutsideLimits(InputWorkspace=self._normWs,
                                       OutputWorkspace="BASIS_NORM_MASK")

        # additional reduction steps when normalizing by Q slice
        if self._doNorm == "by Q slice":
            self._normWs = self._group_and_SofQW(self._normWs,
                                                 self._etBins,
                                                 isSample=False)

    ##########################
    ##  Process the sample  ##
    ##########################
    self._run_list = self._getRuns(self.getProperty("RunNumbers").value,
                                   doIndiv=self._doIndiv)
    for run_set in self._run_list:
        self._samWs = self._sum_and_calibrate(run_set)
        self._samWsRun = str(run_set[0])
        # Mask detectors with insufficient Vanadium signal
        if self._doNorm:
            api.MaskDetectors(Workspace=self._samWs,
                              MaskedWorkspace='BASIS_NORM_MASK')
        # Divide by Vanadium
        if self._doNorm == "by detector ID":
            api.Divide(LHSWorkspace=self._samWs,
                       RHSWorkspace=self._normWs,
                       OutputWorkspace=self._samWs)
        # additional reduction steps
        self._samSqwWs = self._group_and_SofQW(self._samWs,
                                               self._etBins,
                                               isSample=True)
        # Divide by Vanadium
        if self._doNorm == "by Q slice":
            api.Integration(InputWorkspace=self._normWs,
                            OutputWorkspace=self._normWs,
                            RangeLower=DEFAULT_VANADIUM_ENERGY_RANGE[0],
                            RangeUpper=DEFAULT_VANADIUM_ENERGY_RANGE[1])
            api.Divide(LHSWorkspace=self._samSqwWs,
                       RHSWorkspace=self._normWs,
                       OutputWorkspace=self._samSqwWs)
        # Clear mask from reduced file. Needed for binary operations
        # involving this S(Q,w)
        api.ClearMaskFlag(Workspace=self._samSqwWs)
        # Scale so that elastic line has Y-values ~ 1
        if self._normalizeToFirst:
            self._ScaleY(self._samSqwWs)
        # Output Dave and Nexus files
        extension = "_divided.dat" if self._doNorm else ".dat"
        dave_grp_filename = self._makeRunName(self._samWsRun, False) + extension
        api.SaveDaveGrp(Filename=dave_grp_filename,
                        InputWorkspace=self._samSqwWs,
                        ToMicroEV=True)
        extension = "_divided_sqw.nxs" if self._doNorm else "_sqw.nxs"
        processed_filename = self._makeRunName(self._samWsRun, False) + extension
        api.SaveNexus(Filename=processed_filename,
                      InputWorkspace=self._samSqwWs)
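# ---------------------------------------------------------------------------
# Illustrative sketch (not part of the algorithm above; the helper name is
# hypothetical): the "integrate by rebinning" trick used for the Vanadium
# builds Rebin Params as [start, width, end] with width = end - start, so
# every count between start and end lands in a single histogram bin.
def single_bin_params(norm_range):
    """Return a [start, width, end] Rebin triad covering norm_range with one bin."""
    start, end = norm_range
    return [start, end - start, end]

# Example with an arbitrary wavelength window: one bin from 6.24 to 6.30.
assert single_bin_params((6.24, 6.30)) == [6.24, 6.30 - 6.24, 6.30]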
def PyExec(self):
    self._runs = self.getProperty('RunNumbers').value
    self._vanfile = self.getProperty('Vanadium').value
    self._ecruns = self.getProperty('EmptyCanRunNumbers').value
    self._ebins = (self.getProperty('EnergyBins').value).tolist()
    self._qbins = (self.getProperty('MomentumTransferBins').value).tolist()
    self._snorm = self.getProperty('NormalizeSlices').value
    self._clean = self.getProperty('CleanWorkspaces').value
    wn_sqes = self.getPropertyValue("OutputWorkspace")

    # workspace names
    prefix = ''
    if self._clean:
        prefix = '__'
    # "wn" denotes workspace name
    wn_data = prefix + 'data'  # Accumulated data events
    wn_data_mon = prefix + 'data_monitors'  # Accumulated monitors for data
    wn_van = prefix + 'vanadium'  # White-beam vanadium
    wn_van_st = prefix + 'vanadium_S_theta'
    wn_reduced = prefix + 'reduced'  # data after DGSReduction
    wn_ste = prefix + 'S_theta_E'  # data after grouping by theta angle
    wn_sten = prefix + 'S_theta_E_normalized'
    wn_steni = prefix + 'S_theta_E_interp'
    wn_sqe = prefix + 'S_Q_E'
    wn_sqeb = prefix + 'S_Q_E_binned'
    wn_sqesn = prefix + wn_sqes + '_norm'
    # Empty can files
    wn_ec_data = prefix + 'ec_data'  # Accumulated empty can data
    wn_ec_data_mon = prefix + 'ec_data_monitors'  # Accumulated monitors for empty can
    wn_ec_reduced = prefix + 'ec_reduced'  # empty can data after DGSReduction
    wn_ec_ste = prefix + 'ec_S_theta_E'  # empty can data after grouping by theta angle

    # Save current configuration
    facility = config['default.facility']
    instrument = config['default.instrument']
    datasearch = config["datasearch.searcharchive"]
    # Allows searching for ARCS run numbers
    config['default.facility'] = 'SNS'
    config['default.instrument'] = 'ARCS'
    config["datasearch.searcharchive"] = "On"

    try:
        # Load the vanadium file, assumed to be preprocessed, meaning that
        # for every detector all events within a particular wide wavelength
        # range have been rebinned into a single histogram
        self._load(self._vanfile, wn_van)
        # Check for white-beam vanadium, true if the vertical chopper is absent (vChTrans==2)
        if sapi.mtd[wn_van].run().getProperty('vChTrans').value[0] != 2:
            raise ValueError("White-vanadium is required")
        # Load several event files into a single workspace. The nominal incident
        # energy should be the same to avoid difference in energy resolution
        self._load(self._runs, wn_data)
        # Load empty can event files, if present
        if self._ecruns:
            self._load(self._ecruns, wn_ec_data)
    finally:
        # Recover the default configuration
        config['default.facility'] = facility
        config['default.instrument'] = instrument
        config["datasearch.searcharchive"] = datasearch

    # Obtain incident energy as the mean of the nominal Ei values.
    # There is one nominal value for each run number.
    ws_data = sapi.mtd[wn_data]
    Ei = ws_data.getRun()['EnergyRequest'].getStatistics().mean
    Ei_std = ws_data.getRun()['EnergyRequest'].getStatistics().standard_deviation

    # Verify empty can runs were obtained at similar energy
    if self._ecruns:
        ws_ec_data = sapi.mtd[wn_ec_data]
        ec_Ei = ws_ec_data.getRun()['EnergyRequest'].getStatistics().mean
        if abs(Ei - ec_Ei) > Ei_std:
            raise RuntimeError('Empty can runs were obtained at a significantly'
                               ' different incident energy than the sample runs')

    # Obtain energy range. If user did not supply a triad
    # [Estart, Ewidth, Eend] but only Ewidth, then estimate
    # Estart and Eend from the nominal energies
    if len(self._ebins) == 1:
        ws_data = sapi.mtd[wn_data]
        Ei = ws_data.getRun()['EnergyRequest'].getStatistics().mean
        self._ebins.insert(0, -0.5 * Ei)  # prepend
        self._ebins.append(0.95 * Ei)  # append

    # Enforce that the elastic energy (E=0) lies in the middle of the
    # central bin with an appropriate small shift in the energy range
    Ei_min_reduced = self._ebins[0] / self._ebins[1]
    remainder = Ei_min_reduced - int(Ei_min_reduced)
    if remainder >= 0.0:
        erange_shift = self._ebins[1] * (0.5 - remainder)
    else:
        erange_shift = self._ebins[1] * (-0.5 - remainder)
    self._ebins[0] += erange_shift  # shift minimum energy
    self._ebins[-1] += erange_shift  # shift maximum energy

    # Convert to energy transfer. Normalize by proton charge.
    # The output workspace is S(detector-id,E)
    factor = 0.1  # use a finer energy bin than the one passed (self._ebins[1])
    Erange = '{0},{1},{2}'.format(self._ebins[0],
                                  factor * self._ebins[1],
                                  self._ebins[2])
    Ei_calc, T0 = sapi.GetEiT0atSNS(MonitorWorkspace=wn_data_mon,
                                    IncidentEnergyGuess=Ei)
    sapi.MaskDetectors(Workspace=wn_data,
                       MaskedWorkspace=wn_van)  # Use vanadium mask
    sapi.DgsReduction(SampleInputWorkspace=wn_data,
                      SampleInputMonitorWorkspace=wn_data_mon,
                      IncidentEnergyGuess=Ei_calc,
                      UseIncidentEnergyGuess=1,
                      TimeZeroGuess=T0,
                      EnergyTransferRange=Erange,
                      IncidentBeamNormalisation='ByCurrent',
                      OutputWorkspace=wn_reduced)
    if self._ecruns:
        sapi.MaskDetectors(Workspace=wn_ec_data, MaskedWorkspace=wn_van)
        sapi.DgsReduction(SampleInputWorkspace=wn_ec_data,
                          SampleInputMonitorWorkspace=wn_ec_data_mon,
                          IncidentEnergyGuess=Ei_calc,
                          UseIncidentEnergyGuess=1,
                          TimeZeroGuess=T0,
                          EnergyTransferRange=Erange,
                          IncidentBeamNormalisation='ByCurrent',
                          OutputWorkspace=wn_ec_reduced)

    # Obtain maximum and minimum |Q| values, as well as dQ if none passed
    if len(self._qbins) < 3:
        if not self._qbins:
            # insert dQ if empty qbins. The minimal momentum transfer
            # is the result of an event where the initial energy was
            # Ei and the final energy was Ei+dE.
            dE = self._ebins[1]
            self._qbins.append(numpy.sqrt((Ei + dE) / ENERGY_TO_WAVEVECTOR) -
                               numpy.sqrt(Ei / ENERGY_TO_WAVEVECTOR))
        mins, maxs = sapi.ConvertToMDMinMaxLocal(wn_reduced,
                                                 Qdimensions='|Q|',
                                                 dEAnalysisMode='Direct')
        self._qbins.insert(0, mins[0])  # prepend minimum Q
        self._qbins.append(maxs[0])  # append maximum Q

    # Delete sample and empty can event workspaces to free memory.
    if self._clean:
        sapi.DeleteWorkspace(wn_data)
        if self._ecruns:
            sapi.DeleteWorkspace(wn_ec_data)

    # Convert to S(theta,E)
    ki = numpy.sqrt(Ei / ENERGY_TO_WAVEVECTOR)
    # If dE is the smallest energy transfer considered,
    # then dQ/ki is the smallest dtheta (in radians)
    dtheta = self._qbins[1] / ki * (180.0 / numpy.pi)
    # Use a finer dtheta than the nominal smallest value
    factor = 1. / 5  # a reasonable (heuristic) value
    dtheta *= factor
    # Fix: a very small dtheta (<0.15 degrees) prevents correct interpolation
    dtheta = max(0.15, dtheta)

    # Group detectors according to theta angle for the sample runs
    group_file_os_handle, group_file_name = mkstemp(suffix='.xml')
    group_file_handle = os.fdopen(group_file_os_handle, 'w')
    sapi.GenerateGroupingPowder(InputWorkspace=wn_reduced,
                                AngleStep=dtheta,
                                GroupingFilename=group_file_name)
    group_file_handle.close()
    sapi.GroupDetectors(InputWorkspace=wn_reduced,
                        MapFile=group_file_name,
                        OutputWorkspace=wn_ste)
    # Group detectors according to theta angle for the empty can run
    if self._ecruns:
        sapi.GroupDetectors(InputWorkspace=wn_ec_reduced,
                            MapFile=group_file_name,
                            OutputWorkspace=wn_ec_ste)
        # Subtract the empty can from the can+sample
        sapi.Minus(LHSWorkspace=wn_ste,
                   RHSWorkspace=wn_ec_ste,
                   OutputWorkspace=wn_ste)

    # Normalize by the vanadium intensity, but before that we need S(theta)
    # for the vanadium. Recall every detector has all energies into a single
    # bin, so we get S(theta) instead of S(theta,E)
    sapi.GroupDetectors(InputWorkspace=wn_van,
                        MapFile=group_file_name,
                        OutputWorkspace=wn_van_st)
    # Divide by vanadium. Make sure it is integrated in the energy domain
    sapi.Integration(wn_van_st, OutputWorkspace=wn_van_st)
    sapi.Divide(wn_ste, wn_van_st, OutputWorkspace=wn_sten)
    sapi.ClearMaskFlag(Workspace=wn_sten)

    # Temporary files generated by GenerateGroupingPowder to be removed
    os.remove(group_file_name)  # no need for this file
    os.remove(os.path.splitext(group_file_name)[0] + ".par")

    max_i_theta = 0.0
    min_i_theta = 0.0

    # Linear interpolation for those theta values with low intensity
    # First, find minimum theta index with a non-zero histogram
    ws_sten = sapi.mtd[wn_sten]
    for i_theta in range(ws_sten.getNumberHistograms()):
        if ws_sten.dataY(i_theta).any():
            min_i_theta = i_theta
            break
    # Second, find maximum theta with a non-zero histogram
    for i_theta in range(ws_sten.getNumberHistograms() - 1, -1, -1):
        if ws_sten.dataY(i_theta).any():
            max_i_theta = i_theta
            break

    # Scan a range of theta angles and apply interpolation to those theta
    # angles with considerably low intensity (gaps)
    delta_theta = max_i_theta - min_i_theta
    gaps = self._findGaps(wn_sten,
                          int(min_i_theta + 0.1 * delta_theta),
                          int(max_i_theta - 0.1 * delta_theta))
    sapi.CloneWorkspace(InputWorkspace=wn_sten, OutputWorkspace=wn_steni)
    for gap in gaps:
        self._interpolate(wn_steni, gap)  # interpolate this gap

    # Convert S(theta,E) to S(Q,E), then rebin in |Q| and E to MD workspace
    sapi.ConvertToMD(InputWorkspace=wn_steni,
                     QDimensions='|Q|',
                     dEAnalysisMode='Direct',
                     OutputWorkspace=wn_sqe)
    Qmin = self._qbins[0]
    Qmax = self._qbins[-1]
    dQ = self._qbins[1]
    Qrange = '|Q|,{0},{1},{2}'.format(Qmin, Qmax, int((Qmax - Qmin) / dQ))
    Ei_min = self._ebins[0]
    Ei_max = self._ebins[-1]
    dE = self._ebins[1]
    deltaErange = 'DeltaE,{0},{1},{2}'.format(Ei_min, Ei_max,
                                              int((Ei_max - Ei_min) / dE))
    sapi.BinMD(InputWorkspace=wn_sqe,
               AxisAligned=1,
               AlignedDim0=Qrange,
               AlignedDim1=deltaErange,
               OutputWorkspace=wn_sqeb)

    # Slice the data by transforming to a Matrix2Dworkspace,
    # with deltaE along the vertical axis
    sapi.ConvertMDHistoToMatrixWorkspace(
        InputWorkspace=wn_sqeb,
        Normalization='NumEventsNormalization',
        OutputWorkspace=wn_sqes)

    # Ensure correct units
    sapi.mtd[wn_sqes].getAxis(0).setUnit("MomentumTransfer")
    sapi.mtd[wn_sqes].getAxis(1).setUnit("DeltaE")

    # Shift the energy axis, since the reported values should be the center
    # of the bins, instead of the minimum bin boundary
    ws_sqes = sapi.mtd[wn_sqes]
    Eaxis = ws_sqes.getAxis(1)
    e_shift = self._ebins[1] / 2.0
    for i in range(Eaxis.length()):
        Eaxis.setValue(i, Eaxis.getValue(i) + e_shift)

    # Normalize each slice, if requested
    if self._snorm:
        sapi.Integration(InputWorkspace=wn_sqes, OutputWorkspace=wn_sqesn)
        sapi.Divide(LHSWorkspace=wn_sqes,
                    RHSWorkspace=wn_sqesn,
                    OutputWorkspace=wn_sqes)

    # Clean up workspaces from intermediate steps
    if self._clean:
        for name in (wn_van, wn_reduced, wn_ste, wn_van_st, wn_sten,
                     wn_steni, wn_sqe, wn_sqeb, wn_sqesn,
                     'PreprocessedDetectorsWS'):
            if sapi.mtd.doesExist(name):
                sapi.DeleteWorkspace(name)

    # Output some info as a Notice in the log
    ebins = ', '.join(['{0:.2f}'.format(x) for x in self._ebins])
    qbins = ', '.join(['{0:.2f}'.format(x) for x in self._qbins])
    tbins = '{0:.2f} {1:.2f} {2:.2f}'.format(min_i_theta * dtheta, dtheta,
                                             max_i_theta * dtheta)
    message = '\n****** SOME OUTPUT INFORMATION ***' + \
              '\nEnergy bins: ' + ebins + \
              '\nQ bins: ' + qbins + \
              '\nTheta bins: ' + tbins
    kapi.logger.notice(message)

    self.setProperty("OutputWorkspace", sapi.mtd[wn_sqes])
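# ---------------------------------------------------------------------------
# Illustrative sketch (standalone, hypothetical helper): the energy-range
# shift applied above moves both ends of the range by less than half a bin so
# that E = 0 ends up exactly at the center of one bin of width dE.
def center_elastic_line(e_min, de, e_max):
    """Shift [e_min, e_max] so that E=0 sits at a bin center of width de."""
    ratio = e_min / de
    remainder = ratio - int(ratio)
    if remainder >= 0.0:
        shift = de * (0.5 - remainder)
    else:
        shift = de * (-0.5 - remainder)
    return e_min + shift, e_max + shift

# Example: with e_min=-12.3 and de=1.0 the shifted minimum becomes -12.5, so
# the bin [-0.5, 0.5] is centered exactly at the elastic line E=0.
print(center_elastic_line(-12.3, 1.0, 47.7))  # (-12.5, 47.5)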
def _PyExec(self):
    # Collect Flux Normalization
    if self.getProperty('DoFluxNormalization').value is True:
        self._flux_normalization_type = \
            self.getProperty('FluxNormalizationType').value
        if self._flux_normalization_type == 'Monitor':
            self._MonNorm = True

    self._reflection = \
        REFLECTIONS_DICT[self.getProperty('ReflectionType').value]
    self._doIndiv = self.getProperty('DoIndividual').value

    # micro-eV to milli-eV
    self._etBins = 1.E-03 * self.getProperty('EnergyBins').value
    self._qBins = self.getProperty('MomentumTransferBins').value
    self._qBins[0] -= self._qBins[1] / 2.0  # leftmost bin boundary
    self._qBins[2] += self._qBins[1] / 2.0  # rightmost bin boundary

    self._maskFile = self.getProperty('MaskFile').value
    maskfile = self.getProperty('MaskFile').value
    self._maskFile = maskfile if maskfile else \
        pjoin(DEFAULT_MASK_GROUP_DIR, self._reflection['mask_file'])
    self._groupDetOpt = self.getProperty('GroupDetectors').value
    self._normalizeToFirst = self.getProperty('NormalizeToFirst').value
    self._doNorm = self.getProperty('DivideByVanadium').value

    # retrieve properties pertaining to saving to NXSPE file
    self._nsxpe_do = self.getProperty('SaveNXSPE').value
    if self._nsxpe_do:
        self._nxspe_psi_angle_log = self.getProperty('PsiAngleLog').value
        self._nxspe_offset = self.getProperty('PsiOffset').value

    # Apply default mask if not supplied by user
    self._overrideMask = bool(self._maskFile)
    if not self._overrideMask:
        mantid_config.appendDataSearchDir(DEFAULT_MASK_GROUP_DIR)
        self._maskFile = self._reflection['mask_file']
    self._maskWs = tws('BASIS_MASK')
    sapi.LoadMask(Instrument='BASIS',
                  OutputWorkspace=self._maskWs,
                  InputFile=self._maskFile)

    # Work around length issue
    _dMask = sapi.ExtractMask(InputWorkspace=self._maskWs,
                              OutputWorkspace=tws('ExtractMask'))
    self._dMask = _dMask[1]

    #
    # Process the Vanadium
    #
    norm_runs = self.getProperty('NormRunNumbers').value
    if self._doNorm and bool(norm_runs):
        self._normalizationType = self.getProperty('NormalizationType').value
        self.log().information('Divide by Vanadium with normalization ' +
                               self._normalizationType)

        # Following steps common to all types of Vanadium normalization

        # norm_runs encompasses a single set, thus _getRuns returns
        # a list of only one item
        norm_set = self._get_runs(norm_runs, doIndiv=False)[0]
        normWs = tws(self._make_run_name(norm_set[0]) + '_vanadium')
        self._sum_and_calibrate(norm_set, normWs)

        normRange = self._reflection['vanadium_wav_range']
        bin_width = normRange[1] - normRange[0]
        # This rebin integrates counts onto a histogram of a single bin
        if self._normalizationType == 'by detector ID':
            self._normRange = [normRange[0], bin_width, normRange[1]]
            sapi.Rebin(InputWorkspace=normWs,
                       OutputWorkspace=normWs,
                       Params=self._normRange)
            self._normWs = normWs

        # FindDetectorsOutsideLimits to be substituted by MedianDetectorTest
        self._normMask = tws('BASIS_NORM_MASK')
        sapi.FindDetectorsOutsideLimits(
            InputWorkspace=normWs,
            LowThreshold=1.0 * bin_width,  # no count events outside ranges
            RangeLower=normRange[0],
            RangeUpper=normRange[1],
            OutputWorkspace=self._normMask)

        # additional reduction steps when normalizing by Q slice
        if self._normalizationType == 'by Q slice':
            self._normWs = self._group_and_SofQW(normWs, normWs,
                                                 self._etBins,
                                                 isSample=False)

    #
    # Process the sample
    #
    self._run_list = self._get_runs(self.getProperty('RunNumbers').value,
                                    doIndiv=self._doIndiv)
    for run_set in self._run_list:
        self._samWs = tws(self._make_run_name(run_set[0]))
        self._sum_and_calibrate(run_set, self._samWs)
        self._samWsRun = str(run_set[0])
        # Divide by Vanadium detector ID, if pertinent
        if self._normalizationType == 'by detector ID':
            # Mask detectors with low Vanadium signal before dividing
            sapi.MaskDetectors(Workspace=self._samWs,
                               MaskedWorkspace=self._normMask)
            sapi.Divide(LHSWorkspace=self._samWs,
                        RHSWorkspace=self._normWs,
                        OutputWorkspace=self._samWs)
        # additional reduction steps
        prefix = self._make_run_name(run_set[0])
        self._samSqwWs = self._group_and_SofQW(self._samWs, prefix,
                                               self._etBins,
                                               isSample=True)
        # Divide by Vanadium Q slice, if pertinent
        if self._normalizationType == 'by Q slice':
            sapi.Divide(LHSWorkspace=self._samSqwWs,
                        RHSWorkspace=self._normWs,
                        OutputWorkspace=self._samSqwWs)
        # Clear mask from reduced file. Needed for binary operations
        # involving this S(Q,w)
        sapi.ClearMaskFlag(Workspace=self._samSqwWs)
        # Scale so that elastic line has Y-values ~ 1
        if self._normalizeToFirst:
            self._ScaleY(self._samSqwWs)
        # Transform the vertical axis (Q) to point data
        # Q-values are in X-axis now
        sapi.Transpose(InputWorkspace=self._samSqwWs,
                       OutputWorkspace=self._samSqwWs)
        # from histo to point
        sapi.ConvertToPointData(InputWorkspace=self._samSqwWs,
                                OutputWorkspace=self._samSqwWs)
        # Q-values back to vertical axis
        sapi.Transpose(InputWorkspace=self._samSqwWs,
                       OutputWorkspace=self._samSqwWs)
        self.serialize_in_log(self._samSqwWs)  # store the call
        # Output Dave and Nexus files
        extension = '_divided.dat' if self._doNorm else '.dat'
        dave_grp_filename = self._make_run_name(self._samWsRun, False) + \
            extension
        sapi.SaveDaveGrp(Filename=dave_grp_filename,
                         InputWorkspace=self._samSqwWs,
                         ToMicroEV=True)
        extension = '_divided_sqw.nxs' if self._doNorm else '_sqw.nxs'
        processed_filename = self._make_run_name(self._samWsRun, False) + \
            extension
        sapi.SaveNexus(Filename=processed_filename,
                       InputWorkspace=self._samSqwWs)

        # additional output
        if self.getProperty('OutputSusceptibility').value:
            temperature = mtd[self._samSqwWs].getRun().\
                getProperty(TEMPERATURE_SENSOR).getStatistics().mean
            samXqsWs = self._samSqwWs.replace('sqw', 'Xqw')
            sapi.ApplyDetailedBalance(InputWorkspace=self._samSqwWs,
                                      OutputWorkspace=samXqsWs,
                                      Temperature=str(temperature))
            sapi.ConvertUnits(InputWorkspace=samXqsWs,
                              OutputWorkspace=samXqsWs,
                              Target='DeltaE_inFrequency')
            self.serialize_in_log(samXqsWs)
            susceptibility_filename = processed_filename.replace('sqw', 'Xqw')
            sapi.SaveNexus(Filename=susceptibility_filename,
                           InputWorkspace=samXqsWs)

    if self.getProperty('OutputPowderSpectrum').value:
        self.generatePowderSpectrum()
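# ---------------------------------------------------------------------------
# Illustrative sketch (hypothetical helper, not part of the algorithm above):
# the Q-bin adjustment at the top of the method turns a triad of bin centers
# (first center, dQ, last center) into a boundary-based triad by moving each
# end half a step outward.
def centers_to_boundaries(q_bins):
    """[first center, dQ, last center] -> [left boundary, dQ, right boundary]."""
    first, dq, last = q_bins
    return [first - dq / 2.0, dq, last + dq / 2.0]

# Example: centers from 0.3 to 1.9 with dQ = 0.2 span boundaries ~0.2 to 2.0.
print(centers_to_boundaries([0.3, 0.2, 1.9]))  # ~[0.2, 0.2, 2.0]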
def PyExec(self):
    config['default.facility'] = 'SNS'
    config['default.instrument'] = 'ARCS'
    self._runs = self.getProperty('RunNumbers').value
    self._vanfile = self.getProperty('Vanadium').value
    self._ecruns = self.getProperty('EmptyCanRunNumbers').value
    self._ebins_str = self.getProperty('EnergyBins').value
    self._qbins_str = self.getProperty('MomentumTransferBins').value
    self._snorm = self.getProperty('NormalizeSlices').value
    self._clean = self.getProperty('CleanWorkspaces').value
    wn_sqes = self.getPropertyValue("OutputWorkspace")

    # workspace names
    prefix = ''
    if self._clean:
        prefix = '__'
    # Sample files
    wn_data = prefix + 'data'
    wn_van = prefix + 'vanadium'
    wn_reduced = prefix + 'reduced'
    wn_ste = prefix + 'S_theta_E'
    wn_van_st = prefix + 'vanadium_S_theta'
    wn_sten = prefix + 'S_theta_E_normalized'
    wn_steni = prefix + 'S_theta_E_normalized_interp'
    wn_sqe = prefix + 'S_Q_E'
    wn_sqeb = prefix + 'S_Q_E_binned'
    wn_sqesn = prefix + wn_sqes + '_norm'
    # Empty can files
    wn_ec_data = prefix + 'ec_data'
    wn_ec_reduced = prefix + 'ec_reduced'
    wn_ec_ste = prefix + 'ec_S_theta_E'

    datasearch = config["datasearch.searcharchive"]
    if datasearch != "On":
        config["datasearch.searcharchive"] = "On"

    # Load several event files into a single workspace. The nominal incident
    # energy should be the same to avoid difference in energy resolution
    api.Load(Filename=self._runs, OutputWorkspace=wn_data)

    # Load the vanadium file, assumed to be preprocessed, meaning that
    # for every detector all events within a particular wide wavelength
    # range have been rebinned into a single histogram
    api.Load(Filename=self._vanfile, OutputWorkspace=wn_van)

    # Load empty can event files, if present
    if self._ecruns:
        api.Load(Filename=self._ecruns, OutputWorkspace=wn_ec_data)

    # Retrieve the mask from the vanadium workspace, and apply it to the data
    # (and empty can, if submitted)
    api.MaskDetectors(Workspace=wn_data, MaskedWorkspace=wn_van)
    if self._ecruns:
        api.MaskDetectors(Workspace=wn_ec_data, MaskedWorkspace=wn_van)

    # Obtain incident energy as the mean of the nominal Ei values.
    # There is one nominal value per events file.
    ws_data = api.mtd[wn_data]
    Ei = ws_data.getRun()['EnergyRequest'].getStatistics().mean
    Ei_std = ws_data.getRun()['EnergyRequest'].getStatistics().standard_deviation

    # Verify empty can runs were obtained at similar energy
    if self._ecruns:
        ws_ec_data = api.mtd[wn_ec_data]
        ec_Ei = ws_ec_data.getRun()['EnergyRequest'].getStatistics().mean
        if abs(Ei - ec_Ei) > Ei_std:
            raise RuntimeError('Empty can runs were obtained at a significantly'
                               ' different incident energy than the sample runs')

    # Obtain energy range
    self._ebins = [
        float(x)
        for x in re.compile(r'\d+[\.\d+]*').findall(self._ebins_str)
    ]
    if len(self._ebins) == 1:
        ws_data = api.mtd[wn_data]
        Ei = ws_data.getRun()['EnergyRequest'].getStatistics().mean
        self._ebins.insert(0, -0.5 * Ei)  # prepend
        self._ebins.append(0.95 * Ei)  # append

    # Enforce that the elastic energy (E=0) lies in the middle of the
    # central bin with an appropriate small shift in the energy range
    Ei_min_reduced = self._ebins[0] / self._ebins[1]
    remainder = Ei_min_reduced - int(Ei_min_reduced)
    if remainder >= 0.0:
        erange_shift = self._ebins[1] * (0.5 - remainder)
    else:
        erange_shift = self._ebins[1] * (-0.5 - remainder)
    self._ebins[0] += erange_shift  # shift minimum energy
    self._ebins[-1] += erange_shift  # shift maximum energy

    # Convert to energy transfer. Normalize by proton charge.
    # The output workspace is S(detector-id,E)
    factor = 0.1  # a fine energy bin
    Erange = '{0},{1},{2}'.format(self._ebins[0],
                                  factor * self._ebins[1],
                                  self._ebins[2])
    api.DgsReduction(SampleInputWorkspace=wn_data,
                     EnergyTransferRange=Erange,
                     OutputWorkspace=wn_reduced)
    if self._ecruns:
        api.DgsReduction(SampleInputWorkspace=wn_ec_data,
                         EnergyTransferRange=Erange,
                         IncidentBeamNormalisation='ByCurrent',
                         OutputWorkspace=wn_ec_reduced)

    # Obtain maximum and minimum |Q| values, as well as dQ if none passed
    self._qbins = [
        float(x)
        for x in re.compile(r'\d+[\.\d+]*').findall(self._qbins_str)
    ]
    if len(self._qbins) < 3:
        if not self._qbins:
            # insert dQ if empty qbins
            dE = self._ebins[1]
            self._qbins.append(numpy.sqrt((Ei + dE) / ENERGY_TO_WAVEVECTOR) -
                               numpy.sqrt(Ei / ENERGY_TO_WAVEVECTOR))
        mins, maxs = api.ConvertToMDMinMaxLocal(wn_reduced,
                                                Qdimensions='|Q|',
                                                dEAnalysisMode='Direct')
        self._qbins.insert(0, mins[0])  # prepend minimum Q
        self._qbins.append(maxs[0])  # append maximum Q

    # Clean up the events files. They take a lot of space in memory
    api.DeleteWorkspace(wn_data)
    if self._ecruns:
        api.DeleteWorkspace(wn_ec_data)

    # Convert to S(theta,E)
    ki = numpy.sqrt(Ei / ENERGY_TO_WAVEVECTOR)
    factor = 1. / 5  # a reasonable (heuristic) value
    # If dE is the smallest energy transfer considered,
    # then dQ/ki is the smallest dtheta (in radians)
    dtheta = factor * self._qbins[1] / ki * (180.0 / numpy.pi)
    # very small dtheta (<0.15 degrees) prevents interpolation
    dtheta = max(0.15, dtheta)
    group_file_os_handle, group_file_name = mkstemp(suffix='.xml')
    group_file_handle = os.fdopen(group_file_os_handle, 'w')
    api.GenerateGroupingPowder(InputWorkspace=wn_reduced,
                               AngleStep=dtheta,
                               GroupingFilename=group_file_name)
    group_file_handle.close()
    api.GroupDetectors(InputWorkspace=wn_reduced,
                       MapFile=group_file_name,
                       OutputWorkspace=wn_ste)
    if self._ecruns:
        api.GroupDetectors(InputWorkspace=wn_ec_reduced,
                           MapFile=group_file_name,
                           OutputWorkspace=wn_ec_ste)
        # Subtract the empty can from the can+sample
        api.Minus(LHSWorkspace=wn_ste,
                  RHSWorkspace=wn_ec_ste,
                  OutputWorkspace=wn_ste)

    # Normalize by the vanadium intensity, but before that we need S(theta)
    # for the vanadium. Recall every detector has all energies into a single
    # bin, so we get S(theta) instead of S(theta,E)
    api.GroupDetectors(InputWorkspace=wn_van,
                       MapFile=group_file_name,
                       OutputWorkspace=wn_van_st)
    os.remove(group_file_name)  # no need for this file
    api.Divide(wn_ste, wn_van_st, OutputWorkspace=wn_sten)
    api.ClearMaskFlag(Workspace=wn_sten)

    max_i_theta = 0.0
    min_i_theta = 0.0

    # Linear interpolation
    # First, find minimum theta index with a non-zero histogram
    ws_sten = api.mtd[wn_sten]
    for i_theta in range(ws_sten.getNumberHistograms()):
        if ws_sten.dataY(i_theta).any():
            min_i_theta = i_theta
            break
    # Second, find maximum theta with a non-zero histogram
    for i_theta in range(ws_sten.getNumberHistograms() - 1, -1, -1):
        if ws_sten.dataY(i_theta).any():
            max_i_theta = i_theta
            break

    # Scan the region [min_i_theta, max_i_theta] and apply interpolation to
    # theta angles with no signal whatsoever, S(theta*, E)=0.0 for all energies
    api.CloneWorkspace(InputWorkspace=wn_sten, OutputWorkspace=wn_steni)
    ws_steni = api.mtd[wn_steni]
    i_theta = 1 + min_i_theta
    while i_theta < max_i_theta:
        if not ws_steni.dataY(i_theta).any():
            nonnull_i_theta_start = i_theta - 1  # angle index of non-null histogram
            # scan until we find a non-null histogram
            while not ws_steni.dataY(i_theta).any():
                i_theta += 1
            nonnull_i_theta_end = i_theta  # angle index of non-null histogram
            # The range [1+nonnull_i_theta_start, nonnull_i_theta_end]
            # contains only null-histograms. Interpolate!
            y_start = ws_steni.dataY(nonnull_i_theta_start)
            y_end = ws_steni.dataY(nonnull_i_theta_end)
            intercept = y_start
            slope = (y_end - y_start) / (nonnull_i_theta_end -
                                         nonnull_i_theta_start)
            for null_i_theta in range(1 + nonnull_i_theta_start,
                                      nonnull_i_theta_end):
                ws_steni.dataY(null_i_theta)[:] = intercept + slope * (
                    null_i_theta - nonnull_i_theta_start)
        i_theta += 1

    # Convert S(theta,E) to S(Q,E), then rebin in |Q| and E to MD workspace
    api.ConvertToMD(InputWorkspace=wn_steni,
                    QDimensions='|Q|',
                    dEAnalysisMode='Direct',
                    OutputWorkspace=wn_sqe)
    Qmin = self._qbins[0]
    Qmax = self._qbins[-1]
    dQ = self._qbins[1]
    Qrange = '|Q|,{0},{1},{2}'.format(Qmin, Qmax, int((Qmax - Qmin) / dQ))
    Ei_min = self._ebins[0]
    Ei_max = self._ebins[-1]
    dE = self._ebins[1]
    deltaErange = 'DeltaE,{0},{1},{2}'.format(Ei_min, Ei_max,
                                              int((Ei_max - Ei_min) / dE))
    api.BinMD(InputWorkspace=wn_sqe,
              AxisAligned=1,
              AlignedDim0=Qrange,
              AlignedDim1=deltaErange,
              OutputWorkspace=wn_sqeb)

    # Slice the data by transforming to a Matrix2Dworkspace,
    # with deltaE along the vertical axis
    api.ConvertMDHistoToMatrixWorkspace(
        InputWorkspace=wn_sqeb,
        Normalization='NumEventsNormalization',
        OutputWorkspace=wn_sqes)

    # Shift the energy axis, since the reported values should be the center
    # of the bins, instead of the minimum bin boundary
    ws_sqes = api.mtd[wn_sqes]
    Eaxis = ws_sqes.getAxis(1)
    e_shift = self._ebins[1] / 2.0
    for i in range(Eaxis.length()):
        Eaxis.setValue(i, Eaxis.getValue(i) + e_shift)

    # Normalize each slice
    if self._snorm:
        api.Integration(InputWorkspace=wn_sqes, OutputWorkspace=wn_sqesn)
        api.Divide(LHSWorkspace=wn_sqes,
                   RHSWorkspace=wn_sqesn,
                   OutputWorkspace=wn_sqes)

    # Clean up workspaces from intermediate steps
    if self._clean:
        for name in (wn_van, wn_reduced, wn_ste, wn_van_st, wn_sten,
                     wn_steni, wn_sqe, wn_sqeb, wn_sqesn):
            api.DeleteWorkspace(name)
        if api.mtd.doesExist('PreprocessedDetectorsWS'):
            api.DeleteWorkspace('PreprocessedDetectorsWS')

    # Output some info
    message = '\n****** SOME OUTPUT INFORMATION ***' + \
              '\nEnergy bins: ' + ', '.join(['{0:.2f}'.format(x) for x in self._ebins]) + \
              '\nQ bins: ' + ', '.join(['{0:.2f}'.format(x) for x in self._qbins]) + \
              '\nTheta bins: {0:.2f} {1:.2f} {2:.2f}'.format(min_i_theta * dtheta,
                                                             dtheta,
                                                             max_i_theta * dtheta)
    logger.notice(message)

    self.setProperty("OutputWorkspace", api.mtd[wn_sqes])
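# ---------------------------------------------------------------------------
# Illustrative sketch (standalone, assumes only numpy): the gap-filling loop
# above replaces runs of all-zero histograms between two non-null neighbours
# with a straight line between those neighbours, one interpolated spectrum
# per missing theta index. The helper name is hypothetical.
import numpy

def fill_gap(spectra, start, end):
    """Linearly interpolate the all-zero rows strictly between rows start and end."""
    y_start, y_end = spectra[start], spectra[end]
    slope = (y_end - y_start) / (end - start)
    for idx in range(start + 1, end):
        spectra[idx] = y_start + slope * (idx - start)
    return spectra

# Example: rows 1 and 2 are empty and receive values 1/3 and 2/3 of the way
# between row 0 and row 3.
spectra = numpy.array([[3.0, 6.0], [0.0, 0.0], [0.0, 0.0], [0.0, 3.0]])
print(fill_gap(spectra, 0, 3))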
def PyExec(self):
    config['default.facility'] = "SNS"
    config['default.instrument'] = self._long_inst
    self._reflection = REFLECTIONS_DICT[self.getProperty("ReflectionType").value]
    self._doIndiv = self.getProperty("DoIndividual").value
    self._etBins = 1.E-03 * self.getProperty("EnergyBins").value  # micro-eV to milli-eV
    self._qBins = self.getProperty("MomentumTransferBins").value
    self._qBins[0] -= self._qBins[1] / 2.0  # self._qBins[0] is leftmost bin boundary
    self._qBins[2] += self._qBins[1] / 2.0  # self._qBins[2] is rightmost bin boundary
    self._noMonNorm = self.getProperty("NoMonitorNorm").value
    self._maskFile = self.getProperty("MaskFile").value
    self._groupDetOpt = self.getProperty("GroupDetectors").value
    self._normalizeToFirst = self.getProperty("NormalizeToFirst").value
    self._doNorm = self.getProperty("DivideByVanadium").value

    datasearch = config["datasearch.searcharchive"]
    if datasearch != "On":
        config["datasearch.searcharchive"] = "On"

    # Apply default mask if not supplied by user
    self._overrideMask = bool(self._maskFile)
    if not self._overrideMask:
        config.appendDataSearchDir(DEFAULT_MASK_GROUP_DIR)
        self._maskFile = self._reflection["mask_file"]

    sapi.LoadMask(Instrument='BASIS',
                  OutputWorkspace='BASIS_MASK',
                  InputFile=self._maskFile)

    # Work around length issue
    _dMask = sapi.ExtractMask('BASIS_MASK')
    self._dMask = _dMask[1]
    sapi.DeleteWorkspace(_dMask[0])

    ############################
    ##  Process the Vanadium  ##
    ############################

    norm_runs = self.getProperty("NormRunNumbers").value
    if self._doNorm and bool(norm_runs):
        if ";" in norm_runs:
            raise SyntaxError("Normalization does not support run groups")
        self._normalizationType = self.getProperty("NormalizationType").value
        self.log().information("Divide by Vanadium with normalization " +
                               self._normalizationType)

        # The following steps are common to all types of Vanadium normalization

        # norm_runs encompasses a single set, thus _getRuns returns
        # a list of only one item
        norm_set = self._getRuns(norm_runs, doIndiv=False)[0]
        normWs = self._sum_and_calibrate(norm_set, extra_extension="_norm")

        # This rebin integrates counts onto a histogram of a single bin
        if self._normalizationType == "by detector ID":
            normRange = self.getProperty("NormWavelengthRange").value
            self._normRange = [
                normRange[0], normRange[1] - normRange[0], normRange[1]
            ]
            sapi.Rebin(InputWorkspace=normWs,
                       OutputWorkspace=normWs,
                       Params=self._normRange)

        # FindDetectorsOutsideLimits to be substituted by MedianDetectorTest
        sapi.FindDetectorsOutsideLimits(InputWorkspace=normWs,
                                        OutputWorkspace="BASIS_NORM_MASK")

        # additional reduction steps when normalizing by Q slice
        if self._normalizationType == "by Q slice":
            self._normWs = self._group_and_SofQW(normWs,
                                                 self._etBins,
                                                 isSample=False)
            if not self._debugMode:
                sapi.DeleteWorkspace(normWs)  # Delete vanadium events file

    ##########################
    ##  Process the sample  ##
    ##########################
    self._run_list = self._getRuns(self.getProperty("RunNumbers").value,
                                   doIndiv=self._doIndiv)
    for run_set in self._run_list:
        self._samWs = self._sum_and_calibrate(run_set)
        self._samWsRun = str(run_set[0])
        # Divide by Vanadium detector ID, if pertinent
        if self._normalizationType == "by detector ID":
            # Mask detectors with insufficient Vanadium signal before dividing
            sapi.MaskDetectors(Workspace=self._samWs,
                               MaskedWorkspace='BASIS_NORM_MASK')
            sapi.Divide(LHSWorkspace=self._samWs,
                        RHSWorkspace=self._normWs,
                        OutputWorkspace=self._samWs)
        # additional reduction steps
        self._samSqwWs = self._group_and_SofQW(self._samWs,
                                               self._etBins,
                                               isSample=True)
        if not self._debugMode:
            sapi.DeleteWorkspace(self._samWs)  # delete events file
        # Divide by Vanadium Q slice, if pertinent
        if self._normalizationType == "by Q slice":
            sapi.Divide(LHSWorkspace=self._samSqwWs,
                        RHSWorkspace=self._normWs,
                        OutputWorkspace=self._samSqwWs)
        # Clear mask from reduced file. Needed for binary operations
        # involving this S(Q,w)
        sapi.ClearMaskFlag(Workspace=self._samSqwWs)
        # Scale so that elastic line has Y-values ~ 1
        if self._normalizeToFirst:
            self._ScaleY(self._samSqwWs)
        # Transform the vertical axis to point data
        sapi.Transpose(InputWorkspace=self._samSqwWs,
                       OutputWorkspace=self._samSqwWs)  # Q-values are in X-axis now
        sapi.ConvertToPointData(InputWorkspace=self._samSqwWs,
                                OutputWorkspace=self._samSqwWs)  # from histo to point
        sapi.Transpose(InputWorkspace=self._samSqwWs,
                       OutputWorkspace=self._samSqwWs)  # Q-values back to vertical axis
        # Output Dave and Nexus files
        extension = "_divided.dat" if self._doNorm else ".dat"
        dave_grp_filename = self._makeRunName(self._samWsRun, False) + extension
        sapi.SaveDaveGrp(Filename=dave_grp_filename,
                         InputWorkspace=self._samSqwWs,
                         ToMicroEV=True)
        extension = "_divided_sqw.nxs" if self._doNorm else "_sqw.nxs"
        processed_filename = self._makeRunName(self._samWsRun, False) + extension
        sapi.SaveNexus(Filename=processed_filename,
                       InputWorkspace=self._samSqwWs)

    if not self._debugMode:
        sapi.DeleteWorkspace("BASIS_MASK")  # delete the mask
        if self._doNorm and bool(norm_runs):
            sapi.DeleteWorkspace("BASIS_NORM_MASK")  # delete vanadium mask
            sapi.DeleteWorkspace(self._normWs)  # Delete vanadium S(Q)
def PyExec(self):
    config['default.facility'] = 'SNS'
    config['default.instrument'] = self._long_inst
    self._reflection = \
        REFLECTIONS_DICT[self.getProperty('ReflectionType').value]
    self._doIndiv = self.getProperty('DoIndividual').value
    # micro-eV to milli-eV
    self._etBins = 1.E-03 * self.getProperty('EnergyBins').value
    self._qBins = self.getProperty('MomentumTransferBins').value
    self._qBins[0] -= self._qBins[1] / 2.0  # leftmost bin boundary
    self._qBins[2] += self._qBins[1] / 2.0  # rightmost bin boundary
    self._MonNorm = self.getProperty('MonitorNorm').value
    self._maskFile = self.getProperty('MaskFile').value
    maskfile = self.getProperty('MaskFile').value
    self._maskFile = maskfile if maskfile else \
        pjoin(DEFAULT_MASK_GROUP_DIR, self._reflection['mask_file'])
    self._groupDetOpt = self.getProperty('GroupDetectors').value
    self._normalizeToFirst = self.getProperty('NormalizeToFirst').value
    self._doNorm = self.getProperty('DivideByVanadium').value

    # retrieve properties pertaining to saving to NXSPE file
    self._nsxpe_do = self.getProperty('SaveNXSPE').value
    if self._nsxpe_do:
        self._nxspe_psi_angle_log = self.getProperty('PsiAngleLog').value
        self._nxspe_offset = self.getProperty('PsiOffset').value

    datasearch = config["datasearch.searcharchive"]
    if datasearch != "On":
        config["datasearch.searcharchive"] = "On"

    # Apply default mask if not supplied by user
    self._overrideMask = bool(self._maskFile)
    if not self._overrideMask:
        config.appendDataSearchDir(DEFAULT_MASK_GROUP_DIR)
        self._maskFile = self._reflection["mask_file"]

    sapi.LoadMask(Instrument='BASIS',
                  OutputWorkspace='BASIS_MASK',
                  InputFile=self._maskFile)

    # Work around length issue
    _dMask = sapi.ExtractMask('BASIS_MASK')
    self._dMask = _dMask[1]
    sapi.DeleteWorkspace(_dMask[0])

    ############################
    ##  Process the Vanadium  ##
    ############################

    norm_runs = self.getProperty("NormRunNumbers").value
    if self._doNorm and bool(norm_runs):
        if ";" in norm_runs:
            raise SyntaxError("Normalization does not support run groups")
        self._normalizationType = self.getProperty("NormalizationType").value
        self.log().information("Divide by Vanadium with normalization " +
                               self._normalizationType)

        # Following steps common to all types of Vanadium normalization

        # norm_runs encompasses a single set, thus _getRuns returns
        # a list of only one item
        norm_set = self._getRuns(norm_runs, doIndiv=False)[0]
        normWs = self._sum_and_calibrate(norm_set, extra_extension="_norm")

        normRange = self.getProperty("NormWavelengthRange").value
        bin_width = normRange[1] - normRange[0]
        # This rebin integrates counts onto a histogram of a single bin
        if self._normalizationType == "by detector ID":
            self._normRange = [normRange[0], bin_width, normRange[1]]
            sapi.Rebin(InputWorkspace=normWs,
                       OutputWorkspace=normWs,
                       Params=self._normRange)
            self._normWs = normWs

        # FindDetectorsOutsideLimits to be substituted by MedianDetectorTest
        sapi.FindDetectorsOutsideLimits(
            InputWorkspace=normWs,
            LowThreshold=1.0 * bin_width,  # no count events outside ranges
            RangeLower=normRange[0],
            RangeUpper=normRange[1],
            OutputWorkspace='BASIS_NORM_MASK')

        # additional reduction steps when normalizing by Q slice
        if self._normalizationType == "by Q slice":
            self._normWs = self._group_and_SofQW(normWs,
                                                 self._etBins,
                                                 isSample=False)

    ##########################
    ##  Process the sample  ##
    ##########################
    self._run_list = self._getRuns(self.getProperty("RunNumbers").value,
                                   doIndiv=self._doIndiv)
    for run_set in self._run_list:
        self._samWs = self._sum_and_calibrate(run_set)
        self._samWsRun = str(run_set[0])
        # Divide by Vanadium detector ID, if pertinent
        if self._normalizationType == "by detector ID":
            # Mask detectors with insufficient Vanadium signal before dividing
            sapi.MaskDetectors(Workspace=self._samWs,
                               MaskedWorkspace='BASIS_NORM_MASK')
            sapi.Divide(LHSWorkspace=self._samWs,
                        RHSWorkspace=self._normWs,
                        OutputWorkspace=self._samWs)
        # additional reduction steps
        self._samSqwWs = self._group_and_SofQW(self._samWs,
                                               self._etBins,
                                               isSample=True)
        if not self._debugMode:
            sapi.DeleteWorkspace(self._samWs)  # delete events file
        # Divide by Vanadium Q slice, if pertinent
        if self._normalizationType == "by Q slice":
            sapi.Divide(LHSWorkspace=self._samSqwWs,
                        RHSWorkspace=self._normWs,
                        OutputWorkspace=self._samSqwWs)
        # Clear mask from reduced file. Needed for binary operations
        # involving this S(Q,w)
        sapi.ClearMaskFlag(Workspace=self._samSqwWs)
        # Scale so that elastic line has Y-values ~ 1
        if self._normalizeToFirst:
            self._ScaleY(self._samSqwWs)
        # Transform the vertical axis (Q) to point data
        # Q-values are in X-axis now
        sapi.Transpose(InputWorkspace=self._samSqwWs,
                       OutputWorkspace=self._samSqwWs)
        # from histo to point
        sapi.ConvertToPointData(InputWorkspace=self._samSqwWs,
                                OutputWorkspace=self._samSqwWs)
        # Q-values back to vertical axis
        sapi.Transpose(InputWorkspace=self._samSqwWs,
                       OutputWorkspace=self._samSqwWs)
        self.serialize_in_log(self._samSqwWs)  # store the call
        # Output Dave and Nexus files
        extension = "_divided.dat" if self._doNorm else ".dat"
        dave_grp_filename = self._makeRunName(self._samWsRun, False) + \
            extension
        sapi.SaveDaveGrp(Filename=dave_grp_filename,
                         InputWorkspace=self._samSqwWs,
                         ToMicroEV=True)
        extension = "_divided_sqw.nxs" if self._doNorm else "_sqw.nxs"
        processed_filename = self._makeRunName(self._samWsRun, False) + \
            extension
        sapi.SaveNexus(Filename=processed_filename,
                       InputWorkspace=self._samSqwWs)

        # additional output
        if self.getProperty("OutputSusceptibility").value:
            temperature = mtd[self._samSqwWs].getRun().\
                getProperty(TEMPERATURE_SENSOR).getStatistics().mean
            samXqsWs = self._samSqwWs.replace("sqw", "Xqw")
            sapi.ApplyDetailedBalance(InputWorkspace=self._samSqwWs,
                                      OutputWorkspace=samXqsWs,
                                      Temperature=str(temperature))
            sapi.ConvertUnits(InputWorkspace=samXqsWs,
                              OutputWorkspace=samXqsWs,
                              Target="DeltaE_inFrequency",
                              EMode="Indirect")
            self.serialize_in_log(samXqsWs)
            susceptibility_filename = processed_filename.replace("sqw", "Xqw")
            sapi.SaveNexus(Filename=susceptibility_filename,
                           InputWorkspace=samXqsWs)

    if not self._debugMode:
        sapi.DeleteWorkspace("BASIS_MASK")  # delete the mask
        if self._doNorm and bool(norm_runs):
            sapi.DeleteWorkspace("BASIS_NORM_MASK")  # delete vanadium mask
            sapi.DeleteWorkspace(self._normWs)  # Delete vanadium S(Q)
            if self._normalizationType == "by Q slice":
                sapi.DeleteWorkspace(normWs)  # Delete vanadium events file
        if self.getProperty("ExcludeTimeSegment").value:
            sapi.DeleteWorkspace('splitter')
            [sapi.DeleteWorkspace(name) for name in
             ('splitted_unfiltered', 'TOFCorrectWS')
             if AnalysisDataService.doesExist(name)]
def PyExec(self):
    config['default.facility'] = "SNS"
    config['default.instrument'] = self._long_inst
    self._doIndiv = self.getProperty("DoIndividual").value
    self._etBins = self.getProperty("EnergyBins").value / MICROEV_TO_MILLIEV
    self._qBins = self.getProperty("MomentumTransferBins").value
    self._noMonNorm = self.getProperty("NoMonitorNorm").value
    self._maskFile = self.getProperty("MaskFile").value
    self._groupDetOpt = self.getProperty("GroupDetectors").value

    datasearch = config["datasearch.searcharchive"]
    if datasearch != "On":
        config["datasearch.searcharchive"] = "On"

    # Handle masking file override if necessary
    self._overrideMask = bool(self._maskFile)
    if not self._overrideMask:
        config.appendDataSearchDir(DEFAULT_MASK_GROUP_DIR)
        self._maskFile = DEFAULT_MASK_FILE

    api.LoadMask(Instrument='BASIS',
                 OutputWorkspace='BASIS_MASK',
                 InputFile=self._maskFile)

    # Work around length issue
    _dMask = api.ExtractMask('BASIS_MASK')
    self._dMask = _dMask[1]
    api.DeleteWorkspace(_dMask[0])

    # Do normalization if run numbers are present
    norm_runs = self.getProperty("NormRunNumbers").value
    self._doNorm = bool(norm_runs)
    self.log().information("Do Norm: " + str(self._doNorm))
    if self._doNorm:
        if ";" in norm_runs:
            raise SyntaxError("Normalization does not support run groups")
        # Setup the integration (rebin) parameters
        normRange = self.getProperty("NormWavelengthRange").value
        self._normRange = [normRange[0],
                           normRange[1] - normRange[0],
                           normRange[1]]
        # Process normalization runs
        self._norm_run_list = self._getRuns(norm_runs)
        for norm_set in self._norm_run_list:
            extra_extension = "_norm"
            self._normWs = self._makeRunName(norm_set[0])
            self._normWs += extra_extension
            self._normMonWs = self._normWs + "_monitors"
            self._sumRuns(norm_set, self._normWs, self._normMonWs,
                          extra_extension)
            self._calibData(self._normWs, self._normMonWs)

        api.Rebin(InputWorkspace=self._normWs,
                  OutputWorkspace=self._normWs,
                  Params=self._normRange)
        api.FindDetectorsOutsideLimits(InputWorkspace=self._normWs,
                                       OutputWorkspace="BASIS_NORM_MASK")

    self._run_list = self._getRuns(self.getProperty("RunNumbers").value)
    for run_set in self._run_list:
        self._samWs = self._makeRunName(run_set[0])
        self._samMonWs = self._samWs + "_monitors"
        self._samWsRun = str(run_set[0])
        self._sumRuns(run_set, self._samWs, self._samMonWs)
        # After files are all added, run the reduction
        self._calibData(self._samWs, self._samMonWs)
        if self._doNorm:
            api.MaskDetectors(Workspace=self._samWs,
                              MaskedWorkspace='BASIS_NORM_MASK')
            api.Divide(LHSWorkspace=self._samWs,
                       RHSWorkspace=self._normWs,
                       OutputWorkspace=self._samWs)
        api.ConvertUnits(InputWorkspace=self._samWs,
                         OutputWorkspace=self._samWs,
                         Target='DeltaE',
                         EMode='Indirect')
        api.CorrectKiKf(InputWorkspace=self._samWs,
                        OutputWorkspace=self._samWs,
                        EMode='Indirect')
        api.Rebin(InputWorkspace=self._samWs,
                  OutputWorkspace=self._samWs,
                  Params=self._etBins)
        if self._groupDetOpt != "None":
            if self._groupDetOpt == "Low-Resolution":
                grp_file = "BASIS_Grouping_LR.xml"
            else:
                grp_file = "BASIS_Grouping.xml"
            # If mask override used, we need to add default grouping file
            # location to search paths
            if self._overrideMask:
                config.appendDataSearchDir(DEFAULT_MASK_GROUP_DIR)
            api.GroupDetectors(InputWorkspace=self._samWs,
                               OutputWorkspace=self._samWs,
                               MapFile=grp_file,
                               Behaviour="Sum")
        self._samSqwWs = self._samWs + '_sqw'
        api.SofQW3(InputWorkspace=self._samWs,
                   OutputWorkspace=self._samSqwWs,
                   QAxisBinning=self._qBins,
                   EMode='Indirect',
                   EFixed='2.0826')
        # Clear mask from reduced file. Needed for binary operations
        # involving this S(Q,w)
        api.ClearMaskFlag(Workspace=self._samSqwWs)
        dave_grp_filename = self._makeRunName(self._samWsRun, False) + ".dat"
        api.SaveDaveGrp(Filename=dave_grp_filename,
                        InputWorkspace=self._samSqwWs,
                        ToMicroEV=True)
        processed_filename = self._makeRunName(self._samWsRun, False) + "_sqw.nxs"
        api.SaveNexus(Filename=processed_filename,
                      InputWorkspace=self._samSqwWs)
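# ---------------------------------------------------------------------------
# Illustrative sketch: the EnergyBins property is divided by MICROEV_TO_MILLIEV
# above to express the user-facing micro-eV binning in the milli-eV units of
# DeltaE. The constant value below is an assumption (1 meV = 1000 micro-eV),
# consistent with the 1.E-03 factor used in the other reductions above.
MICROEV_TO_MILLIEV = 1.0e3  # assumed value

def microev_to_milliev(bins_microev):
    """Convert an (Emin, dE, Emax) triad from micro-eV to milli-eV."""
    return [x / MICROEV_TO_MILLIEV for x in bins_microev]

# Example: a (-120, 0.4, 120) micro-eV triad becomes (-0.12, 0.0004, 0.12) meV.
print(microev_to_milliev([-120.0, 0.4, 120.0]))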