def PyExec(self):
    """Sort the spectra of the input workspaces by ascending |Q|.

    The (|q|, spectrum-index) pairs are read from the workspace whose
    name contains 'qvectors'; every workspace in the input list is then
    rebuilt, spectrum by spectrum, in ascending-|q| order.

    Raises RuntimeError when no 'qvectors' workspace is found.
    """
    # get parameter values
    wsString = self.getPropertyValue("InputWorkspace").strip()
    # internal (temporary) workspace names used while shuffling spectra
    wsOutput = "__OutputWorkspace"
    wsTemp = "__Sort_temp"
    # get the workspace list, expanding any workspace groups
    wsNames = []
    for wsName in wsString.split(","):
        ws = mtd[wsName.strip()]
        if isinstance(ws, WorkspaceGroup):
            wsNames.extend(ws.getNames())
        else:
            wsNames.append(wsName)
    # remove leftover output from a previous (possibly aborted) run
    if wsOutput in mtd:
        ms.DeleteWorkspace(Workspace=wsOutput)
    sortStat = []
    for wsName in wsNames:
        if "qvectors" in wsName:
            # extract the spectrum: build (|q|, spectrum index) pairs
            ws = mtd[wsName.strip()]
            for s in range(0, ws.getNumberHistograms()):
                y_s = ws.readY(s)
                stuple = (self.GetXValue(y_s), s)
                sortStat.append(stuple)
            sortStat.sort()
    if len(sortStat) == 0:
        raise RuntimeError("Cannot find file with qvectors, aborting")
    # sort spectra using norm of q
    for wsName in wsNames:
        ws = mtd[wsName.strip()]
        yUnit = ws.getAxis(1).getUnit().unitID()
        transposed = False
        if ws.getNumberHistograms() < len(sortStat):
            # fewer histograms than |q| values: spectra run along the other
            # axis, so transpose before re-ordering (and undo at the end)
            ms.Transpose(InputWorkspace=wsName, OutputWorkspace=wsName)
            transposed = True
        for norm, spec in sortStat:
            # append spectra one at a time in ascending-|q| order
            ms.ExtractSingleSpectrum(InputWorkspace=wsName,
                                     OutputWorkspace=wsTemp,
                                     WorkspaceIndex=spec)
            if wsOutput in mtd:
                ms.ConjoinWorkspaces(InputWorkspace1=wsOutput,
                                     InputWorkspace2=wsTemp,
                                     CheckOverlapping=False)
                # ConjoinWorkspaces may or may not consume the temp workspace
                if wsTemp in mtd:
                    ms.DeleteWorkspace(Workspace=wsTemp)
            else:
                # first spectrum: the temp workspace becomes the output
                ms.RenameWorkspace(InputWorkspace=wsTemp,
                                   OutputWorkspace=wsOutput)
        # put norm as y value and copy units from input
        loopIndex = 0
        wsOut = mtd[wsOutput]
        for norm, spec in sortStat:
            # encode |q| (scaled by 1000) into the spectrum number
            wsOut.getSpectrum(loopIndex).setSpectrumNo(int(norm*1000))
            loopIndex = loopIndex + 1
        if len(yUnit) > 0:
            wsOut.getAxis(1).setUnit(yUnit)
        if transposed:
            ms.Transpose(InputWorkspace=wsOutput, OutputWorkspace=wsOutput)
        # replace the input workspace with its sorted version
        ms.RenameWorkspace(InputWorkspace=wsOutput, OutputWorkspace=wsName)
def integrate_detector(self, ws, specular=True):
    """Collapse a detector workspace onto a single axis.

    The workspace is first summed with RefRoi — over the low-resolution
    direction when ``specular`` is True, over the main direction when it
    is False — then integrated and transposed.

    :param ws: Mantid workspace to reduce
    :param specular: if True, the low-resolution direction is integrated over
    :return: the integrated, transposed workspace
    """
    summed = api.RefRoi(InputWorkspace=ws,
                        IntegrateY=specular,
                        NXPixel=self.n_x_pixel,
                        NYPixel=self.n_y_pixel,
                        ConvertToQ=False,
                        OutputWorkspace="ws_summed")
    return api.Transpose(api.Integration(summed))
def _PyExec(self):
    """Run the BASIS reduction: read algorithm properties, reduce the
    Vanadium normalization runs (if requested), then reduce each sample
    run set into S(Q,w) and save Dave-group / Nexus output files.

    Side effects: creates and mutates named workspaces in the analysis
    data service and writes output files to disk.
    """
    # Collect Flux Normalization
    if self.getProperty('DoFluxNormalization').value is True:
        self._flux_normalization_type =\
            self.getProperty('FluxNormalizationType').value
        if self._flux_normalization_type == 'Monitor':
            self._MonNorm = True
    self._reflection =\
        REFLECTIONS_DICT[self.getProperty('ReflectionType').value]
    self._doIndiv = self.getProperty('DoIndividual').value
    # micro-eV to mili-eV
    self._etBins = 1.E-03 * self.getProperty('EnergyBins').value
    self._qBins = self.getProperty('MomentumTransferBins').value
    # widen the Q range by half a bin on each side so the bin centers
    # coincide with the requested boundaries
    self._qBins[0] -= self._qBins[1] / 2.0  # leftmost bin boundary
    self._qBins[2] += self._qBins[1] / 2.0  # rightmost bin boundary
    # NOTE(review): the next line is immediately overwritten by the
    # maskfile conditional below — it looks redundant; confirm upstream
    self._maskFile = self.getProperty('MaskFile').value
    maskfile = self.getProperty('MaskFile').value
    self._maskFile = maskfile if maskfile else\
        pjoin(DEFAULT_MASK_GROUP_DIR, self._reflection['mask_file'])
    self._groupDetOpt = self.getProperty('GroupDetectors').value
    self._normalizeToFirst = self.getProperty('NormalizeToFirst').value
    self._doNorm = self.getProperty('DivideByVanadium').value

    # retrieve properties pertaining to saving to NXSPE file
    self._nsxpe_do = self.getProperty('SaveNXSPE').value
    if self._nsxpe_do:
        self._nxspe_psi_angle_log = self.getProperty('PsiAngleLog').value
        self._nxspe_offset = self.getProperty('PsiOffset').value

    # Apply default mask if not supplied by user
    self._overrideMask = bool(self._maskFile)
    if not self._overrideMask:
        mantid_config.appendDataSearchDir(DEFAULT_MASK_GROUP_DIR)
        self._maskFile = self._reflection['mask_file']
    self._maskWs = tws('BASIS_MASK')
    sapi.LoadMask(Instrument='BASIS',
                  OutputWorkspace=self._maskWs,
                  InputFile=self._maskFile)
    # Work around length issue
    _dMask = sapi.ExtractMask(InputWorkspace=self._maskWs,
                              OutputWorkspace=tws('ExtractMask'))
    self._dMask = _dMask[1]
    #
    # Process the Vanadium
    #
    norm_runs = self.getProperty('NormRunNumbers').value
    if self._doNorm and bool(norm_runs):
        self._normalizationType = self.getProperty(
            'NormalizationType').value
        self.log().information('Divide by Vanadium with normalization' +
                               self._normalizationType)
        # Following steps common to all types of Vanadium normalization
        # norm_runs encompasses a single set, thus _getRuns returns
        # a list of only one item
        norm_set = self._get_runs(norm_runs, doIndiv=False)[0]
        normWs = tws(self._make_run_name(norm_set[0]) + '_vanadium')
        self._sum_and_calibrate(norm_set, normWs)
        normRange = self._reflection['vanadium_wav_range']
        bin_width = normRange[1] - normRange[0]
        # This rebin integrates counts onto a histogram of a single bin
        if self._normalizationType == 'by detector ID':
            self._normRange = [normRange[0], bin_width, normRange[1]]
            sapi.Rebin(InputWorkspace=normWs,
                       OutputWorkspace=normWs,
                       Params=self._normRange)
            self._normWs = normWs
        # Detectors outside limits are substituted by MedianDetectorTest
        self._normMask = tws('BASIS_NORM_MASK')
        sapi.FindDetectorsOutsideLimits(
            InputWorkspace=normWs,
            LowThreshold=1.0 * bin_width,
            # no count events outside ranges
            RangeLower=normRange[0],
            RangeUpper=normRange[1],
            OutputWorkspace=self._normMask)
        # additional reduction steps when normalizing by Q slice
        if self._normalizationType == 'by Q slice':
            self._normWs = self._group_and_SofQW(normWs, normWs,
                                                 self._etBins,
                                                 isSample=False)
    #
    # Process the sample
    #
    self._run_list = self._get_runs(self.getProperty('RunNumbers').value,
                                    doIndiv=self._doIndiv)
    for run_set in self._run_list:
        self._samWs = tws(self._make_run_name(run_set[0]))
        self._sum_and_calibrate(run_set, self._samWs)
        self._samWsRun = str(run_set[0])
        # Divide by Vanadium detector ID, if pertinent
        if self._normalizationType == 'by detector ID':
            # Mask detectors with low Vanadium signal before dividing
            sapi.MaskDetectors(Workspace=self._samWs,
                               MaskedWorkspace=self._normMask)
            sapi.Divide(LHSWorkspace=self._samWs,
                        RHSWorkspace=self._normWs,
                        OutputWorkspace=self._samWs)
        # additional reduction steps
        prefix = self._make_run_name(run_set[0])
        self._samSqwWs = self._group_and_SofQW(self._samWs, prefix,
                                               self._etBins,
                                               isSample=True)
        # Divide by Vanadium Q slice, if pertinent
        if self._normalizationType == 'by Q slice':
            sapi.Divide(LHSWorkspace=self._samSqwWs,
                        RHSWorkspace=self._normWs,
                        OutputWorkspace=self._samSqwWs)
        # Clear mask from reduced file. Needed for binary operations
        # involving this S(Q,w)
        sapi.ClearMaskFlag(Workspace=self._samSqwWs)
        # Scale so that elastic line has Y-values ~ 1
        if self._normalizeToFirst:
            self._ScaleY(self._samSqwWs)
        # Transform the vertical axis (Q) to point data
        # Q-values are in X-axis now
        sapi.Transpose(InputWorkspace=self._samSqwWs,
                       OutputWorkspace=self._samSqwWs)
        # from histo to point
        sapi.ConvertToPointData(InputWorkspace=self._samSqwWs,
                                OutputWorkspace=self._samSqwWs)
        # Q-values back to vertical axis
        sapi.Transpose(InputWorkspace=self._samSqwWs,
                       OutputWorkspace=self._samSqwWs)
        self.serialize_in_log(self._samSqwWs)  # store the call
        # Output Dave and Nexus files
        extension = '_divided.dat' if self._doNorm else '.dat'
        dave_grp_filename = self._make_run_name(self._samWsRun, False) + \
            extension
        sapi.SaveDaveGrp(Filename=dave_grp_filename,
                         InputWorkspace=self._samSqwWs,
                         ToMicroEV=True)
        extension = '_divided_sqw.nxs' if self._doNorm else '_sqw.nxs'
        processed_filename = self._make_run_name(self._samWsRun, False) + \
            extension
        sapi.SaveNexus(Filename=processed_filename,
                       InputWorkspace=self._samSqwWs)
        # additional output: dynamic susceptibility X(Q,w)
        if self.getProperty('OutputSusceptibility').value:
            temperature = mtd[self._samSqwWs].getRun().\
                getProperty(TEMPERATURE_SENSOR).getStatistics().mean
            samXqsWs = self._samSqwWs.replace('sqw', 'Xqw')
            sapi.ApplyDetailedBalance(InputWorkspace=self._samSqwWs,
                                      OutputWorkspace=samXqsWs,
                                      Temperature=str(temperature))
            sapi.ConvertUnits(InputWorkspace=samXqsWs,
                              OutputWorkspace=samXqsWs,
                              Target='DeltaE_inFrequency')
            self.serialize_in_log(samXqsWs)
            susceptibility_filename = processed_filename.replace(
                'sqw', 'Xqw')
            sapi.SaveNexus(Filename=susceptibility_filename,
                          InputWorkspace=samXqsWs)
        if self.getProperty('OutputPowderSpectrum').value:
            self.generatePowderSpectrum()
def generate_plots(run_number, workspace, options=None):
    """Build the standard diagnostics plots for a reflectometry run.

    :param run_number: run number, used only in plot titles
    :param workspace: Mantid workspace holding the detector data
    :param options: unused; kept for interface compatibility
    :return: list of plots [x-y map, x-tof map, counts per x pixel,
             tof distribution]
    """
    instrument = workspace.getInstrument()
    n_x = int(instrument.getNumberParameter("number-of-x-pixels")[0])
    n_y = int(instrument.getNumberParameter("number-of-y-pixels")[0])
    title = "r%s" % run_number

    # Rebin onto a coarse TOF grid before any summing
    rebinned = api.Rebin(workspace,
                         params="%s, 50, %s" % (workspace.getTofMin(),
                                                workspace.getTofMax()))

    # X-TOF plot: sum over the Y (low-resolution) direction
    direct_summed = api.RefRoi(InputWorkspace=rebinned, IntegrateY=True,
                               NXPixel=n_x, NYPixel=n_y, ConvertToQ=False,
                               YPixelMin=0, YPixelMax=n_y,
                               OutputWorkspace="direct_summed")
    log_counts = np.log10(direct_summed.extractY())
    tof_ms = direct_summed.extractX()[0] / 1000.0
    x_tof_plot = _plot2d(z=log_counts, y=np.arange(log_counts.shape[0]),
                         x=tof_ms, x_label="TOF (ms)", y_label="X pixel",
                         title=title)

    # X-Y plot: integrate over TOF, reshape onto the detector grid
    integrated_ws = api.Integration(rebinned)
    pixel_map = np.reshape(np.log10(integrated_ws.extractY()), (n_x, n_y))
    xy_plot = _plot2d(z=pixel_map.T, x=np.arange(n_x), y=np.arange(n_y),
                      title=title)

    # Count per X pixel
    per_pixel = api.Transpose(api.Integration(direct_summed))
    counts = per_pixel.readY(0)
    peak_pixels = _plot1d(np.arange(len(counts)), counts,
                          x_label="X pixel", y_label="Counts", title=title)

    # TOF distribution, summed over all spectra
    summed = api.SumSpectra(rebinned)
    tof_dist = _plot1d(summed.readX(0) / 1000.0, summed.readY(0),
                       x_range=None, x_label="TOF (ms)", y_label="Counts",
                       title=title)

    return [xy_plot, x_tof_plot, peak_pixels, tof_dist]
def PyExec(self):
    """Run the BASIS reduction: configure the facility, reduce the
    Vanadium normalization runs (if requested), then reduce each sample
    run set into S(Q,w) and save Dave-group / Nexus output files.

    Side effects: mutates the Mantid global config, creates named
    workspaces in the analysis data service, and writes files to disk.
    """
    config['default.facility'] = "SNS"
    config['default.instrument'] = self._long_inst
    self._reflection = REFLECTIONS_DICT[self.getProperty(
        "ReflectionType").value]
    self._doIndiv = self.getProperty("DoIndividual").value
    self._etBins = 1.E-03 * self.getProperty(
        "EnergyBins").value  # micro-eV to mili-eV
    self._qBins = self.getProperty("MomentumTransferBins").value
    # widen the Q range by half a bin on each side
    self._qBins[0] -= self._qBins[
        1] / 2.0  # self._qBins[0] is leftmost bin boundary
    self._qBins[2] += self._qBins[
        1] / 2.0  # self._qBins[2] is rightmost bin boundary
    self._noMonNorm = self.getProperty("NoMonitorNorm").value
    self._maskFile = self.getProperty("MaskFile").value
    self._groupDetOpt = self.getProperty("GroupDetectors").value
    self._normalizeToFirst = self.getProperty("NormalizeToFirst").value
    self._doNorm = self.getProperty("DivideByVanadium").value

    # make sure the archive is searched for run files
    datasearch = config["datasearch.searcharchive"]
    if datasearch != "On":
        config["datasearch.searcharchive"] = "On"

    # Apply default mask if not supplied by user
    self._overrideMask = bool(self._maskFile)
    if not self._overrideMask:
        config.appendDataSearchDir(DEFAULT_MASK_GROUP_DIR)
        self._maskFile = self._reflection["mask_file"]
    sapi.LoadMask(Instrument='BASIS',
                  OutputWorkspace='BASIS_MASK',
                  InputFile=self._maskFile)
    # Work around length issue
    _dMask = sapi.ExtractMask('BASIS_MASK')
    self._dMask = _dMask[1]
    sapi.DeleteWorkspace(_dMask[0])

    ############################
    ##  Process the Vanadium  ##
    ############################
    norm_runs = self.getProperty("NormRunNumbers").value
    if self._doNorm and bool(norm_runs):
        if ";" in norm_runs:
            raise SyntaxError("Normalization does not support run groups")
        self._normalizationType = self.getProperty(
            "NormalizationType").value
        self.log().information("Divide by Vanadium with normalization" +
                               self._normalizationType)
        # The following steps are common to all types of Vanadium
        # normalization. norm_runs encompasses a single set, thus
        # _getRuns returns a list of only one item
        norm_set = self._getRuns(norm_runs, doIndiv=False)[0]
        normWs = self._sum_and_calibrate(norm_set, extra_extension="_norm")
        # This rebin integrates counts onto a histogram of a single bin
        # NOTE(review): this branch tests "by detectorID" while the
        # sample loop below tests "by detector ID" — confirm which
        # spelling the NormalizationType property actually uses
        if self._normalizationType == "by detectorID":
            normRange = self.getProperty("NormWavelengthRange").value
            self._normRange = [
                normRange[0], normRange[1] - normRange[0], normRange[1]
            ]
            sapi.Rebin(InputWorkspace=normWs,
                       OutputWorkspace=normWs,
                       Params=self._normRange)
        # FindDetectorsOutsideLimits to be substituted by MedianDetectorTest
        sapi.FindDetectorsOutsideLimits(InputWorkspace=normWs,
                                        OutputWorkspace="BASIS_NORM_MASK")
        # additional reduction steps when normalizing by Q slice
        if self._normalizationType == "by Q slice":
            self._normWs = self._group_and_SofQW(normWs, self._etBins,
                                                 isSample=False)
        if not self._debugMode:
            sapi.DeleteWorkspace(normWs)  # Delete vanadium events file

    ##########################
    ##  Process the sample  ##
    ##########################
    self._run_list = self._getRuns(self.getProperty("RunNumbers").value,
                                   doIndiv=self._doIndiv)
    for run_set in self._run_list:
        self._samWs = self._sum_and_calibrate(run_set)
        self._samWsRun = str(run_set[0])
        # Divide by Vanadium detector ID, if pertinent
        if self._normalizationType == "by detector ID":
            # Mask detectors with insufficient Vanadium signal before dividing
            sapi.MaskDetectors(Workspace=self._samWs,
                               MaskedWorkspace='BASIS_NORM_MASK')
            sapi.Divide(LHSWorkspace=self._samWs,
                        RHSWorkspace=self._normWs,
                        OutputWorkspace=self._samWs)
        # additional reduction steps
        self._samSqwWs = self._group_and_SofQW(self._samWs, self._etBins,
                                               isSample=True)
        if not self._debugMode:
            sapi.DeleteWorkspace(self._samWs)  # delete events file
        # Divide by Vanadium Q slice, if pertinent
        if self._normalizationType == "by Q slice":
            sapi.Divide(LHSWorkspace=self._samSqwWs,
                        RHSWorkspace=self._normWs,
                        OutputWorkspace=self._samSqwWs)
        # Clear mask from reduced file. Needed for binary operations
        # involving this S(Q,w)
        sapi.ClearMaskFlag(Workspace=self._samSqwWs)
        # Scale so that elastic line has Y-values ~ 1
        if self._normalizeToFirst:
            self._ScaleY(self._samSqwWs)
        # Transform the vertical axis to point data
        sapi.Transpose(
            InputWorkspace=self._samSqwWs,
            OutputWorkspace=self._samSqwWs)  # Q-values are in X-axis now
        sapi.ConvertToPointData(
            InputWorkspace=self._samSqwWs,
            OutputWorkspace=self._samSqwWs)  # from histo to point
        sapi.Transpose(InputWorkspace=self._samSqwWs,
                       OutputWorkspace=self._samSqwWs
                       )  # Q-values back to vertical axis
        # Output Dave and Nexus files
        extension = "_divided.dat" if self._doNorm else ".dat"
        dave_grp_filename = self._makeRunName(self._samWsRun,
                                              False) + extension
        sapi.SaveDaveGrp(Filename=dave_grp_filename,
                         InputWorkspace=self._samSqwWs,
                         ToMicroEV=True)
        extension = "_divided_sqw.nxs" if self._doNorm else "_sqw.nxs"
        processed_filename = self._makeRunName(self._samWsRun,
                                               False) + extension
        sapi.SaveNexus(Filename=processed_filename,
                       InputWorkspace=self._samSqwWs)

    # final cleanup of helper workspaces (skipped in debug mode)
    if not self._debugMode:
        sapi.DeleteWorkspace("BASIS_MASK")  # delete the mask
        if self._doNorm and bool(norm_runs):
            sapi.DeleteWorkspace("BASIS_NORM_MASK")  # delete vanadium mask
            sapi.DeleteWorkspace(self._normWs)  # Delete vanadium S(Q)
def PyExec(self):
    """Run the BASIS reduction: configure the facility, reduce the
    Vanadium normalization runs (if requested), then reduce each sample
    run set into S(Q,w), optionally compute the dynamic susceptibility,
    and save Dave-group / Nexus output files.

    Side effects: mutates the Mantid global config, creates named
    workspaces in the analysis data service, and writes files to disk.
    """
    config['default.facility'] = 'SNS'
    config['default.instrument'] = self._long_inst
    self._reflection =\
        REFLECTIONS_DICT[self.getProperty('ReflectionType').value]
    self._doIndiv = self.getProperty('DoIndividual').value
    # micro-eV to mili-eV
    self._etBins = 1.E-03 * self.getProperty('EnergyBins').value
    self._qBins = self.getProperty('MomentumTransferBins').value
    # widen the Q range by half a bin on each side
    self._qBins[0] -= self._qBins[1]/2.0  # leftmost bin boundary
    self._qBins[2] += self._qBins[1]/2.0  # rightmost bin boundary
    self._MonNorm = self.getProperty('MonitorNorm').value
    # NOTE(review): the next line is immediately overwritten by the
    # maskfile conditional below — it looks redundant; confirm upstream
    self._maskFile = self.getProperty('MaskFile').value
    maskfile = self.getProperty('MaskFile').value
    self._maskFile = maskfile if maskfile else\
        pjoin(DEFAULT_MASK_GROUP_DIR, self._reflection['mask_file'])
    self._groupDetOpt = self.getProperty('GroupDetectors').value
    self._normalizeToFirst = self.getProperty('NormalizeToFirst').value
    self._doNorm = self.getProperty('DivideByVanadium').value

    # retrieve properties pertaining to saving to NXSPE file
    self._nsxpe_do = self.getProperty('SaveNXSPE').value
    if self._nsxpe_do:
        self._nxspe_psi_angle_log = self.getProperty('PsiAngleLog').value
        self._nxspe_offset = self.getProperty('PsiOffset').value

    # make sure the archive is searched for run files
    datasearch = config["datasearch.searcharchive"]
    if datasearch != "On":
        config["datasearch.searcharchive"] = "On"

    # Apply default mask if not supplied by user
    self._overrideMask = bool(self._maskFile)
    if not self._overrideMask:
        config.appendDataSearchDir(DEFAULT_MASK_GROUP_DIR)
        self._maskFile = self._reflection["mask_file"]
    sapi.LoadMask(Instrument='BASIS',
                  OutputWorkspace='BASIS_MASK',
                  InputFile=self._maskFile)
    # Work around length issue
    _dMask = sapi.ExtractMask('BASIS_MASK')
    self._dMask = _dMask[1]
    sapi.DeleteWorkspace(_dMask[0])

    ############################
    ##  Process the Vanadium  ##
    ############################
    norm_runs = self.getProperty("NormRunNumbers").value
    if self._doNorm and bool(norm_runs):
        if ";" in norm_runs:
            raise SyntaxError("Normalization does not support run groups")
        self._normalizationType = self.getProperty("NormalizationType").value
        self.log().information("Divide by Vanadium with normalization" +
                               self._normalizationType)
        # Following steps common to all types of Vanadium normalization
        # norm_runs encompasses a single set, thus _getRuns returns
        # a list of only one item
        norm_set = self._getRuns(norm_runs, doIndiv=False)[0]
        normWs = self._sum_and_calibrate(norm_set, extra_extension="_norm")
        normRange = self.getProperty("NormWavelengthRange").value
        bin_width = normRange[1] - normRange[0]
        # This rebin integrates counts onto a histogram of a single bin
        if self._normalizationType == "by detector ID":
            self._normRange = [normRange[0], bin_width, normRange[1]]
            sapi.Rebin(InputWorkspace=normWs,
                       OutputWorkspace=normWs,
                       Params=self._normRange)
            self._normWs = normWs
        # FindDetectorsOutsideLimits to be substituted by MedianDetectorTest
        sapi.FindDetectorsOutsideLimits(
            InputWorkspace=normWs,
            LowThreshold=1.0*bin_width,
            # no count events outside ranges
            RangeLower=normRange[0],
            RangeUpper=normRange[1],
            OutputWorkspace='BASIS_NORM_MASK')
        # additional reduction steps when normalizing by Q slice
        if self._normalizationType == "by Q slice":
            self._normWs = self._group_and_SofQW(normWs, self._etBins,
                                                 isSample=False)

    ##########################
    ##  Process the sample  ##
    ##########################
    self._run_list = self._getRuns(self.getProperty("RunNumbers").value,
                                   doIndiv=self._doIndiv)
    for run_set in self._run_list:
        self._samWs = self._sum_and_calibrate(run_set)
        self._samWsRun = str(run_set[0])
        # Divide by Vanadium detector ID, if pertinent
        if self._normalizationType == "by detector ID":
            # Mask detectors with insufficient Vanadium signal before dividing
            sapi.MaskDetectors(Workspace=self._samWs,
                               MaskedWorkspace='BASIS_NORM_MASK')
            sapi.Divide(LHSWorkspace=self._samWs,
                        RHSWorkspace=self._normWs,
                        OutputWorkspace=self._samWs)
        # additional reduction steps
        self._samSqwWs = self._group_and_SofQW(self._samWs, self._etBins,
                                               isSample=True)
        if not self._debugMode:
            sapi.DeleteWorkspace(self._samWs)  # delete events file
        # Divide by Vanadium Q slice, if pertinent
        if self._normalizationType == "by Q slice":
            sapi.Divide(LHSWorkspace=self._samSqwWs,
                        RHSWorkspace=self._normWs,
                        OutputWorkspace=self._samSqwWs)
        # Clear mask from reduced file. Needed for binary operations
        # involving this S(Q,w)
        sapi.ClearMaskFlag(Workspace=self._samSqwWs)
        # Scale so that elastic line has Y-values ~ 1
        if self._normalizeToFirst:
            self._ScaleY(self._samSqwWs)
        # Transform the vertical axis (Q) to point data
        # Q-values are in X-axis now
        sapi.Transpose(InputWorkspace=self._samSqwWs,
                       OutputWorkspace=self._samSqwWs)
        # from histo to point
        sapi.ConvertToPointData(InputWorkspace=self._samSqwWs,
                                OutputWorkspace=self._samSqwWs)
        # Q-values back to vertical axis
        sapi.Transpose(InputWorkspace=self._samSqwWs,
                       OutputWorkspace=self._samSqwWs)
        self.serialize_in_log(self._samSqwWs)  # store the call
        # Output Dave and Nexus files
        extension = "_divided.dat" if self._doNorm else ".dat"
        dave_grp_filename = self._makeRunName(self._samWsRun, False) +\
            extension
        sapi.SaveDaveGrp(Filename=dave_grp_filename,
                         InputWorkspace=self._samSqwWs,
                         ToMicroEV=True)
        extension = "_divided_sqw.nxs" if self._doNorm else "_sqw.nxs"
        processed_filename = self._makeRunName(self._samWsRun, False) +\
            extension
        sapi.SaveNexus(Filename=processed_filename,
                       InputWorkspace=self._samSqwWs)
        # additional output: dynamic susceptibility X(Q,w)
        if self.getProperty("OutputSusceptibility").value:
            temperature = mtd[self._samSqwWs].getRun().\
                getProperty(TEMPERATURE_SENSOR).getStatistics().mean
            samXqsWs = self._samSqwWs.replace("sqw", "Xqw")
            sapi.ApplyDetailedBalance(InputWorkspace=self._samSqwWs,
                                      OutputWorkspace=samXqsWs,
                                      Temperature=str(temperature))
            sapi.ConvertUnits(InputWorkspace=samXqsWs,
                              OutputWorkspace=samXqsWs,
                              Target="DeltaE_inFrequency",
                              Emode="Indirect")
            self.serialize_in_log(samXqsWs)
            susceptibility_filename = processed_filename.replace("sqw", "Xqw")
            sapi.SaveNexus(Filename=susceptibility_filename,
                          InputWorkspace=samXqsWs)

    # final cleanup of helper workspaces (skipped in debug mode)
    if not self._debugMode:
        sapi.DeleteWorkspace("BASIS_MASK")  # delete the mask
        if self._doNorm and bool(norm_runs):
            sapi.DeleteWorkspace("BASIS_NORM_MASK")  # delete vanadium mask
            sapi.DeleteWorkspace(self._normWs)  # Delete vanadium S(Q)
            if self._normalizationType == "by Q slice":
                sapi.DeleteWorkspace(normWs)  # Delete vanadium events file
        if self.getProperty("ExcludeTimeSegment").value:
            sapi.DeleteWorkspace('splitter')
            [sapi.DeleteWorkspace(name) for name in
             ('splitted_unfiltered', 'TOFCorrectWS') if
             AnalysisDataService.doesExist(name)]