def plot_graphs(self, i, reduced_files_path, base_output_name, output_workspace, plot_2D, plot1Dgraph):
    """Plot reduction output inside MantidPlot.

    2D mode: create the colour-fill plot and export it as <workspace>.png
    under reduced_files_path, closing the window unless plot_2D is True.
    1D mode: create a graph on the first call (i == 0) and merge each later
    spectrum into it via plot1Dgraph.

    :param i: index of the current file in the reduction loop
    :param reduced_files_path: directory the 2D PNG is written to ('~' expanded)
    :param base_output_name: workspace name to plot
    :param output_workspace: workspace handle; its name() provides the PNG name
    :param plot_2D: when False, close the 2D plot window after export
    :param plot1Dgraph: graph handle of the first 1D plot (merge target)
    """
    if mantidplot:
        if self.reduce_2D:
            plot2Dgraph = mantidplot.plot2D(base_output_name)
            n_2D = output_workspace.name() + '.png'
            SavePlot = os.path.join(os.path.expanduser(reduced_files_path), n_2D)
            plot2Dgraph.export(SavePlot)
            print("2D File Exists:{}".format(os.path.exists(SavePlot)))
            # is there more elegant way to do it?
            # Problem is that plot2Dgraph creates and plot the graph file at the same time...
            if not plot_2D:
                plot2Dgraph.close()
        else:
            if i == 0:
                # to create first graph to stick all the rest to it; perhaps there is more elegant way
                # of creating initial empty handler, but I am not aware ... yet
                plot1Dgraph = mantidplot.plotSpectrum(base_output_name, 0,
                                                      distribution=mantidplot.DistrFlag.DistrFalse,
                                                      clearWindow=False)
            else:
                mantidplot.mergePlots(plot1Dgraph,
                                      mantidplot.plotSpectrum(base_output_name, 0,
                                                              distribution=mantidplot.DistrFlag.DistrFalse,
                                                              clearWindow=False))
def update(self):
    """Update data members according to reduction results.

    Copies the reduction log text from the singleton, then plots either its
    declared output workspaces or, failing that, any '*_Iq' workspaces in
    the analysis data service whose names start with an input file name.
    """
    if IS_IN_MANTIDPLOT:
        from reduction_workflow.command_interface import ReductionSingleton
        self.log_text = ReductionSingleton().log_text
        try:
            if (hasattr(ReductionSingleton(), "output_workspaces")
                    and len(ReductionSingleton().output_workspaces) > 0):
                for item in ReductionSingleton().output_workspaces:
                    mantidplot.plotSpectrum(item, 0, True)
            else:
                # No declared outputs: fall back to I(q) workspaces derived
                # from the input data file names.
                iq_plots = []
                for item in ReductionSingleton()._data_files.keys():
                    for ws in AnalysisDataService.Instance().getObjectNames():
                        if ws.startswith(item) and ws.endswith("_Iq"):
                            iq_plots.append(ws)
                if len(iq_plots) > 0:
                    mantidplot.plotSpectrum(iq_plots, 0, True)
        except Exception:
            # Fix: 'raise E, msg' and sys.exc_value are Python 2-only; use the
            # 2/3-compatible forms. The bare 'except:' is narrowed to
            # Exception so KeyboardInterrupt/SystemExit pass through.
            raise RuntimeError("Could not plot resulting output\n %s" % sys.exc_info()[1])
def PhiRanges(phis, plot=True):
    """Reduce over each phi pair in *phis* ([a, b, c, d] -> a-b and c-d).

    @param phis: flat list of phi limits, consumed two at a time
    @param plot: when True, plot the reduced spectra (must be run in Mantid)
    @return: the workspace name produced by the first phi pair
    """
    print_message('PhiRanges( %s,plot=%s)' % (str(phis), plot))
    # todo covert their string into Python array
    if len(phis) % 2 != 0:
        raise RuntimeError('Phi ranges must be given as pairs')

    reduced_workspace_names = []
    for start in range(0, len(phis), 2):
        SetPhiLimit(phis[start], phis[start + 1])
        reduced_workspace_names.append(WavRangeReduction())

    if plot and mantidplot:
        mantidplot.plotSpectrum(reduced_workspace_names, 0)
    # Return just the workspace name of the full range
    return reduced_workspace_names[0]
def CompWavRanges(wavelens, plot=True, combineDet=None, resetSetup=True):
    """Compares the momentum transfer results calculated from different wavelength ranges.

    Given the list of wave ranges [a, b, c] it reduces for wavelengths a-b, b-c and a-c.
    @param wavelens: the list of wavelength ranges
    @param plot: set this to true to plot the result (must be run in Mantid), default is true
    @param combineDet: see description in WavRangeReduction
    @param resetSetup: if true reset setup at the end
    @return: the workspace name of the full-range reduction
    """
    _printMessage('CompWavRanges( %s,plot=%s)' % (str(wavelens), plot))

    # this only makes sense for 1D reductions
    if ReductionSingleton().to_Q.output_type == '2D':
        issueWarning('This wave ranges check is a 1D analysis, ignoring 2D setting')
        _printMessage('Set1D()')
        ReductionSingleton().to_Q.output_type = '1D'

    # Fix: idiomatic isinstance() instead of type(x) != type([]) comparisons.
    if not isinstance(wavelens, list) or len(wavelens) < 2:
        if not isinstance(wavelens, tuple):
            raise RuntimeError('Error CompWavRanges() requires a list of wavelengths between which reductions will be performed.')

    # Full wavelength range first, then each successive slice.
    calculated = [WavRangeReduction(wav_start=wavelens[0], wav_end=wavelens[len(wavelens) - 1],
                                    combineDet=combineDet, resetSetup=False)]
    for i in range(0, len(wavelens) - 1):
        calculated.append(WavRangeReduction(wav_start=wavelens[i], wav_end=wavelens[i + 1],
                                            combineDet=combineDet, resetSetup=False))

    if resetSetup:
        _refresh_singleton()
    if plot:
        mantidplot.plotSpectrum(calculated, 0)
    # return just the workspace name of the full range
    return calculated[0]
def PhiRanges(phis, plot=True):
    """Given a list of phi ranges [a, b, c, d] it reduces in the phi ranges a-b and c-d.

    @param phis: the list of phi ranges
    @param plot: set this to true to plot the result (must be run in Mantid), default is true
    @return: the workspace name of the first phi-pair reduction
    """
    _printMessage('PhiRanges( %s,plot=%s)'%(str(phis),plot))

    #todo covert their string into Python array
    # Fix: the old check 'len(phis)/2 != float(len(phis))/2.' relies on
    # Python 2 integer division; under Python 3 true division it is always
    # False and odd-length input slips through. Use modulo instead.
    if len(phis) % 2 != 0:
        raise RuntimeError('Phi ranges must be given as pairs')

    try:
        # run the reductions, calculated will be an array with the names of
        # all the workspaces produced
        calculated = []
        for i in range(0, len(phis), 2):
            SetPhiLimit(phis[i], phis[i+1])
            #reducedResult = ReductionSingleton()._reduce()
            #RenameWorkspace(reducedResult,'bob')
            #calculated.append(reducedResult)
            calculated.append(ReductionSingleton()._reduce())
            ReductionSingleton.replace(ReductionSingleton().settings())
    finally:
        _refresh_singleton()

    if plot:
        mantidplot.plotSpectrum(calculated, 0)

    # return just the workspace name of the full range
    return calculated[0]
def plot_graphs(self, i, reduced_files_path, base_output_name, output_workspace, plot_2D, plot1Dgraph):
    """Render reduction output in MantidPlot.

    2D mode exports the colour-fill plot to <workspace>.png under
    *reduced_files_path*; 1D mode creates a graph on the first call
    (i == 0) and merges later spectra into it via *plot1Dgraph*.
    """
    if not mantidplot:
        return
    if self.reduce_2D:
        plot2Dgraph = mantidplot.plot2D(base_output_name)
        png_name = output_workspace.name() + '.png'
        save_path = os.path.join(os.path.expanduser(reduced_files_path), png_name)
        plot2Dgraph.export(save_path)
        print("2D File Exists:{}".format(os.path.exists(save_path)))
        # plot2D both creates and shows the graph, so close the window
        # afterwards when on-screen 2D plotting was not requested.
        if not plot_2D:
            plot2Dgraph.close()
        return
    # 1D path: both branches draw the same spectrum; only the merge differs.
    new_curve = mantidplot.plotSpectrum(base_output_name, 0,
                                        distribution=mantidplot.DistrFlag.DistrFalse,
                                        clearWindow=False)
    if i == 0:
        # First file: keep the handle so later curves can be merged into it.
        plot1Dgraph = new_curve
    else:
        mantidplot.mergePlots(plot1Dgraph, new_curve)
def eval_absorption_corrections(self, test_ws=None):
    """The method to evaluate the speed and efficiency of the absorption corrections procedure,
    before applying your corrections to the whole workspace and all sample runs.

    The absorption correction procedure invoked with excessive accuracy can run for too long
    providing no real improvements in accuracy. This is why it is recommended to run this
    procedure evaluating absorption on selected detectors and deploy the corrections to the
    whole runs only after achieving satisfactory accuracy and execution time.

    The procedure evaluates and prints the expected time to run the absorption corrections
    on the whole run.

    Input: If provided, the pointer or the name of the workspace available in analysis data
           service. If it is not, the workspace is taken from
           PropertyManager.sample_run property.

    Usage: Reduce single run and uncomment this method in the __main__ area to evaluate
           adsorption corrections. Change adsorption corrections parameters below to achieve
           best speed and acceptable accuracy.
    """
    # Gain access to the property manager:
    propman = rd.reducer.prop_man
    # Set up Sample as one of:
    # 1) Cylinder([Chem_formula],[Height,Radius])
    # 2) FlatPlate([Chem_formula],[Height,Width,Thick])
    # 3) HollowCylinder([Chem_formula],[Height,InnerRadius,OuterRadius])
    # 4) Sphere([[Chem_formula],Radius)
    # The units are in cm
    propman.correct_absorption_on = Cylinder('Fe', [10, 2])  # Will be taken from def_advanced_properties
    #                                prop['correct_absorption_on'] =  if not defined here
    #
    # Use Monte-Carlo integration. Take sparse energy points and a few integration attempts
    # to increase initial speed. Increase these numbers to achieve better accuracy.
    propman.abs_corr_info = {'EventsPerPoint': 3000}  # ,'NumberOfWavelengthPoints':30}
    # See MonteCarloAbsorption for all possible properties description and possibility to define
    # a sparse instrument for speed.
    #
    # Gain access to the workspace. The workspace should contain Ei log, containing incident energy
    # (or be reduced)
    if test_ws is None:
        test_ws = PropertyManager.sample_run.get_workspace()
    # Define spectra list to test absorption on. Empty list will
    # define absorption on the whole workspace.
    check_spectra = [1, 200]
    # Evaluate corrections on the selected spectra of the workspace and the time to obtain
    # the corrections on the whole workspace.
    corrections, time_to_correct_abs = self.evaluate_abs_corrections(test_ws, check_spectra)
    # When accuracy and speed of the corrections is satisfactory, copy chosen abs_corr_info
    # properties from above to the advanced_porperties area to run in reduction.
    if mpl is not None:
        n_spectra = len(check_spectra)
        if n_spectra == 0:
            # Bug fix: was 'n_specra' (typo), which left n_spectra == 0 and
            # plotted an empty spectrum range for the whole-workspace case.
            n_spectra = corrections.getNumberHistograms()
        mpl.plotSpectrum(corrections, range(0, n_spectra))
    #
    return corrections
def plot(self, *args):
    """Plot a spectrum. Parameters are the same as in getSpectrum(...)
    with additional name argument"""
    from mantidplot import plotSpectrum
    # A fourth positional argument overrides the default workspace name.
    if len(args) == 4:
        ws_name = args[3]
    else:
        ws_name = 'CrystalFieldMultiSite_{}'.format(self.Ions)
    xArray, yArray = self.getSpectrum(*args)
    ws_name += '_{}'.format(args[0])
    if isinstance(args[0], int):
        ws_name += '_{}'.format(args[1])
    makeWorkspace(xArray, yArray, child=False, ws_name=ws_name)
    plotSpectrum(ws_name, 0)
def update(self):
    """Update data members according to reduction results.

    Copies the reduction log text and plots spectrum 0 (with errors) of
    every output workspace produced by the reduction singleton.
    """
    if IS_IN_MANTIDPLOT:
        self.log_text = ReductionSingleton().log_text
        try:
            for item in ReductionSingleton().output_workspaces:
                mantidplot.plotSpectrum(item, 0, True)
        except Exception:
            # Fix: 'raise E, msg' and sys.exc_value are Python 2-only; the
            # 2/3-compatible equivalents preserve the message. The bare
            # 'except:' is narrowed to Exception.
            raise RuntimeError("Could not plot resulting output\n %s" % sys.exc_info()[1])
def plot_graph(workspace):
    """Plot spectrum 0 of *workspace* — via MantidPlot when running inside
    it, otherwise via the workbench plot() function (non-PyQt4 builds only)."""
    if IN_MANTIDPLOT:
        return mantidplot.plotSpectrum(workspace, 0)
    if PYQT4:
        # No plotting backend available for PyQt4 outside MantidPlot.
        return
    workspaces = workspace if isinstance(workspace, list) else [workspace]
    plot(workspaces, wksp_indices=[0])
def PlotResult(workspace, canvas=None):
    """
    Draws a graph of the passed workspace. If the workspace is 2D (has many spectra)
    a contour plot is written
    @param workspace: a workspace name or handle to plot
    @param canvas: optional handle to an existing graph to write the plot to
    @return: a handle to the graph that was written to, or None if plotting
             functions were unavailable
    """
    # ensure that we are dealing with a workspace handle rather than its name
    workspace = mtd[str(workspace)]
    if workspace.isGroup():
        numSpecs = workspace[0].getNumberHistograms()
    else:
        numSpecs = workspace.getNumberHistograms()

    # Fix: 'graph' was previously unbound when the NameError branch fired,
    # so the mergePlots/return below raised UnboundLocalError.
    graph = None
    try:
        if numSpecs == 1:
            graph = mantidplot.plotSpectrum(workspace, 0)
        else:
            graph = mantidplot.importMatrixWorkspace(workspace.getName()).plotGraph2D()
    except NameError:
        issueWarning('Plot functions are not available, is this being run from outside Mantidplot?')

    if canvas is not None and graph is not None:
        # we were given a handle to an existing graph, use it
        mantidplot.mergePlots(canvas, graph)
        graph = canvas
    return graph
def _plot_quartiles(output_workspaces, sample_scatter):
    """Plot the quartile workspaces on log-log axes in a window titled
    '<sample_scatter>_beam_centre_finder' and return the graph handle."""
    title = '{}_beam_centre_finder'.format(sample_scatter)
    graph_handle = mantidplot.plotSpectrum(output_workspaces, 0)
    layer = graph_handle.activeLayer()
    layer.logLogAxes()
    layer.setTitle(title)
    graph_handle.setName(title)
    return graph_handle
def plot_result(self):
    """ Plot the scaled data sets """
    # User-entered overlap limits for the low/medium Q ranges.
    low_xmin = util._check_and_get_float_line_edit(self._content.low_min_edit)
    low_xmax = util._check_and_get_float_line_edit(self._content.low_max_edit)
    med_xmin = util._check_and_get_float_line_edit(self._content.medium_min_edit)
    med_xmax = util._check_and_get_float_line_edit(self._content.medium_max_edit)
    ws_list = []
    if self._low_q_data is not None:
        xmin,_ = self._low_q_data.get_skipped_range()
        self._low_q_data.apply_scale(xmin, low_xmax)
        ws_list.append(self._low_q_data.get_scaled_ws())
    if self._medium_q_data is not None:
        _,xmax = self._medium_q_data.get_skipped_range()
        if self._high_q_data is not None:
            # A high-Q set follows, so cap the medium range at the user limit.
            xmax = med_xmax
        self._medium_q_data.apply_scale(low_xmin, xmax)
        ws_list.append(self._medium_q_data.get_scaled_ws())
    if self._high_q_data is not None:
        _,xmax = self._high_q_data.get_skipped_range()
        self._high_q_data.apply_scale(med_xmin, xmax)
        ws_list.append(self._high_q_data.get_scaled_ws())
    if len(ws_list)>0:
        g = mantidplot.graph(self._graph)
        # Create the named graph window only once; reuse it afterwards.
        if g is None or not self._plotted:
            g = mantidplot.plotSpectrum(ws_list, [0], True)
            g.setName(self._graph)
            self._plotted = True
def plot_result(self):
    """Scale each available Q-range data set over its overlap region and
    plot the scaled workspaces in the named graph window (created once)."""
    content = self._content
    low_xmin = util._check_and_get_float_line_edit(content.low_min_edit)
    low_xmax = util._check_and_get_float_line_edit(content.low_max_edit)
    med_xmin = util._check_and_get_float_line_edit(content.medium_min_edit)
    med_xmax = util._check_and_get_float_line_edit(content.medium_max_edit)

    scaled_workspaces = []
    low_data = self._low_q_data
    if low_data is not None:
        skipped_min, _ = low_data.get_skipped_range()
        low_data.apply_scale(skipped_min, low_xmax)
        scaled_workspaces.append(low_data.get_scaled_ws())

    medium_data = self._medium_q_data
    if medium_data is not None:
        _, upper = medium_data.get_skipped_range()
        if self._high_q_data is not None:
            # A high-Q set follows: cap the medium range at the user limit.
            upper = med_xmax
        medium_data.apply_scale(low_xmin, upper)
        scaled_workspaces.append(medium_data.get_scaled_ws())

    high_data = self._high_q_data
    if high_data is not None:
        _, upper = high_data.get_skipped_range()
        high_data.apply_scale(med_xmin, upper)
        scaled_workspaces.append(high_data.get_scaled_ws())

    if scaled_workspaces:
        graph = mantidplot.graph(self._graph)
        if graph is None or not self._plotted:
            graph = mantidplot.plotSpectrum(scaled_workspaces, [0], True)
            graph.setName(self._graph)
            self._plotted = True
def PlotResult(workspace, canvas=None):
    """
    Draws a graph of the passed workspace. If the workspace is 2D (has many spectra
    a contour plot is written
    @param workspace: a workspace name or handle to plot
    @param canvas: optional handle to an existing graph to write the plot to
    @return: a handle to the graph that was written to
    """
    # NOTE(review): this unconditional raise makes everything below
    # unreachable; presumably the plotting path was deliberately disabled
    # in this build — confirm intent before re-enabling or removing.
    raise NotImplementedError(_plot_missing_str)
    try:
        import mantidplot
        workspace = AnalysisDataService.retrieve(str(workspace))
        number_of_spectra = workspace[0].getNumberHistograms() if isinstance(workspace, WorkspaceGroup) else\
            workspace.getNumberHistograms()
        graph = mantidplot.plotSpectrum(workspace, 0) if number_of_spectra == 1 else \
            mantidplot.importMatrixWorkspace(workspace.name()).plotGraph2D()

        if canvas is not None:
            # we were given a handle to an existing graph, use it
            mantidplot.mergePlots(canvas, graph)
            graph = canvas
        return graph
    except ImportError:
        print_message('Plot functions are not available, is this being run from outside Mantidplot?')
def plot(workspaces, **kwargs):
    """
    General plotting command.

    @param workspaces A workspace or list of workspaces references holding the data
    (string names will work too)

    Accepted Keywords:
    ==================

    The following are exclusive options and only a single one is allowed to be
    specified at one time:

      spectra: An int or list of ints specifying the spectra numbers to plot
      angles: A list or tuple of length 2 defining the min/max angles in degrees
              to plot (inclusive)
      bank: A bank number between 1-8 (inclusive) defining the spectra to plot

    If multiple workspaces are given the ranges are applied separately to each.

    The following are general and apply to all plots types:

      errors: If True then error bars are drawn on the plot
      sum: If supplied and True the given input range on each workspace is summed
           and a single curve for each workspace is generated
      fig: If supplied this window will be used to do the plot rather than
           creating a new one
      clrfig: If True then clear the given window before plotting the user input
    """
    raise_error_if_not_in_gui("plot")
    # Work with a list of workspace references only
    workspaces = to_workspace_list(workspaces)
    # parse input
    range_kw, input_range = check_input(workspaces, **kwargs)
    do_sum = kwargs.get("sum", False)
    with_errors = kwargs.get("errors", False)
    plot_win = kwargs.get("fig", None)
    clrfig = kwargs.get("clrfig", True)

    # Get the function that will do the transform from user input to workspace indices
    range_transform_func, range_src = get_transform_func(range_kw, input_range)
    if do_sum:
        process_func = sum_range
    else:
        # No summing requested: the workspace/range pair passes through untouched.
        def pass_through(ws, rng, *args):
            return ws, rng
        process_func = pass_through

    plot_src = []
    for user_ws in workspaces:
        # Translate range inputs on each workspace to indices
        indices = range_transform_func(user_ws, range_src)
        plot_ws, indices = process_func(user_ws, indices, range_kw, input_range)
        plot_src.append((plot_ws, indices))

    # now plot; successive plots reuse the same window handle so all curves
    # end up in one figure
    from mantidplot import plotSpectrum
    for wksp, indices in plot_src:
        plot_win = plotSpectrum(wksp, indices, error_bars = with_errors,
                                window = plot_win, clearWindow = clrfig)
        if clrfig:
            # If clrfig was true then we only want it to clear for the first plot
            clrfig = False
    return plot_win
def update(self):
    """ Update data members according to reduction results """
    if IS_IN_MANTIDPLOT:
        # Allow for the old reducer, which uses Python API v1
        if self.PYTHON_API==1:
            from reduction.command_interface import ReductionSingleton
        else:
            from reduction_workflow.command_interface import ReductionSingleton
        self.log_text = ReductionSingleton().log_text
        try:
            if hasattr(ReductionSingleton(), "output_workspaces"):
                for item in ReductionSingleton().output_workspaces:
                    mantidplot.plotSpectrum(item, 0, True)
        except:
            # NOTE(review): Python 2-only raise syntax and sys.exc_value;
            # needs porting before this module can run under Python 3.
            raise RuntimeError, "Could not plot resulting output\n %s" % sys.exc_value
def _plot(self):
    """Plot the result workspace as individual spectra and/or as a 2D
    contour, according to the requested plot type."""
    if self._plot_type == 'spectra':
        from mantidplot import plotSpectrum
        spectrum_count = mtd[self._plot_ws].getNumberHistograms()
        try:
            plotSpectrum(self._plot_ws, range(0, spectrum_count))
        except RuntimeError:
            # The user dismissed the many-spectra confirmation dialog.
            logger.notice('Spectrum plotting canceled by user')
    if self._plot_type == 'contour':
        from mantidplot import importMatrixWorkspace
        importMatrixWorkspace(self._plot_ws).plotGraph2D()
def CompWavRanges(wavelens, plot=True, combineDet=None, resetSetup=True):
    """Compares the momentum transfer results calculated from different wavelength ranges.

    Given the list of wave ranges [a, b, c] it reduces for wavelengths a-b, b-c and a-c.
    @param wavelens: the list of wavelength ranges
    @param plot: set this to true to plot the result (must be run in Mantid), default is true
    @param combineDet: see description in WavRangeReduction
    @param resetSetup: if true reset setup at the end
    """
    print_message('CompWavRanges( %s,plot=%s)' % (str(wavelens), plot))

    if not isinstance(wavelens, list) or len(wavelens) < 2:
        if not isinstance(wavelens, tuple):
            raise RuntimeError(
                'Error CompWavRanges() requires a list of wavelengths between which '
                'reductions will be performed.')

    # Coerce the limits to float in place (the caller's list is updated on purpose).
    for idx, value in enumerate(wavelens):
        wavelens[idx] = float(value)

    # Full-range reduction first, then one reduction per adjacent pair.
    names = [WavRangeReduction(wav_start=wavelens[0],
                               wav_end=wavelens[-1],
                               combineDet=combineDet,
                               resetSetup=False)]
    for low, high in zip(wavelens[:-1], wavelens[1:]):
        names.append(WavRangeReduction(wav_start=low,
                                       wav_end=high,
                                       combineDet=combineDet,
                                       resetSetup=False))

    if plot and mantidplot:
        mantidplot.plotSpectrum(names, 0)
    # Return just the workspace name of the full range
    return names[0]
def _plot_data_mantidplot(self, fig_window, data):
    """Draw every entry of *data* into *fig_window*; multi-axis figures
    select the matching layer before each curve is added, then arrange
    the layers at the end."""
    from mantidplot import plotSpectrum
    single_axis = self.number_of_axes == 1
    for plot_info in data:
        distr_state = self._get_distr_state_mantid_plot(plot_info.normalised)
        if single_axis:
            plotSpectrum(plot_info.workspace, plot_info.specNum - 1,
                         distribution=distr_state, window=fig_window)
        else:
            fig_window.setActiveLayer(fig_window.layer(plot_info.axis + 1))
            plotSpectrum(plot_info.workspace, plot_info.specNum - 1,
                         distribution=distr_state, window=fig_window, type=0)
    if not single_axis:
        fig_window.arrangeLayers(False, False)
def plot_reduction(workspace_name, plot_type):
    """
    Plot a given workspace based on the Plot property.

    @param workspace_name Name of workspace to plot
    @param plot_type Type of plot to create ('Spectra', 'Contour' or 'Both')
    """
    histogram_count = mtd[workspace_name].getNumberHistograms()

    if plot_type in ('Spectra', 'Both'):
        from mantidplot import plotSpectrum
        try:
            plotSpectrum(workspace_name, range(0, histogram_count))
        except RuntimeError:
            logger.notice('Spectrum plotting canceled by user')

    # A contour plot only makes sense with more than one spectrum.
    if plot_type in ('Contour', 'Both') and histogram_count > 1:
        from mantidplot import importMatrixWorkspace
        importMatrixWorkspace(workspace_name).plotGraph2D()
def plot(self, i=0, workspace=None, ws_index=0, name=None):
    """Plot a spectrum. Parameters are the same as in getSpectrum(...).

    The CreateWorkspace boilerplate that was triplicated across the three
    branches is factored into a local helper; behaviour is unchanged.
    """
    from mantidplot import plotSpectrum
    from mantid.api import AlgorithmManager
    createWS = AlgorithmManager.createUnmanaged('CreateWorkspace')
    createWS.initialize()

    xArray, yArray = self.getSpectrum(i, workspace, ws_index)
    ws_name = name if name is not None else 'CrystalField_%s' % self._ion

    def _create_workspace(out_name):
        # One-shot CreateWorkspace run for the computed spectrum arrays.
        createWS.setProperty('DataX', xArray)
        createWS.setProperty('DataY', yArray)
        createWS.setProperty('OutputWorkspace', out_name)
        createWS.execute()

    if isinstance(i, int):
        if workspace is None:
            if i > 0:
                ws_name += '_%s' % i
            _create_workspace(ws_name)
            # Reuse (and clear) the window from a previous plot of this index.
            plot_window = self._plot_window[i] if i in self._plot_window else None
            self._plot_window[i] = plotSpectrum(ws_name, 0, window=plot_window, clearWindow=True)
        else:
            ws_name += '_%s' % workspace
            if i > 0:
                ws_name += '_%s' % i
            _create_workspace(ws_name)
            plotSpectrum(ws_name, 0)
    else:
        ws_name += '_%s' % i
        _create_workspace(ws_name)
        plotSpectrum(ws_name, 0)
def _plot_output(self):
    """Plot the elastic/inelastic/total/EISF scan results, using MantidPlot
    when available and falling back to the workbench plotting functions."""
    suffixes = ('_el_eq1', '_inel_eq1', '_total_eq1',
                '_el_eq2', '_inel_eq2', '_total_eq2', '_eisf')
    workspace_names = [self._scan_ws + suffix for suffix in suffixes]
    try:
        from mantidplot import plotSpectrum
        for workspace_name in workspace_names:
            plotSpectrum(workspace_name, 0, error_bars=True)
        if self._msd_fit:
            plotSpectrum(self._scan_ws + '_msd', 1, error_bars=True)
        if self._width_fit:
            plotSpectrum(self._output_ws + '_Diffusion', 0, error_bars=True)
    except ImportError:
        # Not inside MantidPlot: use the workbench plotting API instead.
        from mantidqt.plotting.functions import plot
        plot(workspace_names, wksp_indices=[0] * len(workspace_names), errors=True)
        if self._msd_fit:
            plot([self._scan_ws + '_msd'], wksp_indices=[1], errors=True)
        if self._width_fit:
            plot([self._output_ws + '_Diffusion'], wksp_indices=[0], errors=True)
def CompWavRanges(wavelens, plot=True, combineDet=None, resetSetup=True):
    """Compares the momentum transfer results calculated from different wavelength ranges.

    Given the list of wave ranges [a, b, c] it reduces for wavelengths a-b, b-c and a-c.
    @param wavelens: the list of wavelength ranges
    @param plot: set this to true to plot the result (must be run in Mantid), default is true
    @param combineDet: see description in WavRangeReduction
    @param resetSetup: if true reset setup at the end
    """
    print_message('CompWavRanges( %s,plot=%s)' % (str(wavelens), plot))

    valid_list = isinstance(wavelens, list) and len(wavelens) >= 2
    if not valid_list and not isinstance(wavelens, tuple):
        raise RuntimeError(
            'Error CompWavRanges() requires a list of wavelengths between which '
            'reductions will be performed.')

    # Coerce every limit to float in place.
    for index in range(len(wavelens)):
        wavelens[index] = float(wavelens[index])

    # Full-range reduction, then each successive slice.
    reduced = [WavRangeReduction(wav_start=wavelens[0],
                                 wav_end=wavelens[-1],
                                 combineDet=combineDet,
                                 resetSetup=False)]
    index = 0
    while index < len(wavelens) - 1:
        reduced.append(WavRangeReduction(wav_start=wavelens[index],
                                         wav_end=wavelens[index + 1],
                                         combineDet=combineDet,
                                         resetSetup=False))
        index += 1

    if plot and mantidplot:
        mantidplot.plotSpectrum(reduced, 0)
    # Return just the workspace name of the full range
    return reduced[0]
def CompWavRanges(wavelens, plot=True):
    """ Compares the momentum transfer results calculated from different wavelength ranges.
    Given the list of wave ranges [a, b, c] it reduces for wavelengths a-b, b-c and a-c
    @param wavelens: the list of wavelength ranges
    @param plot: set this to true to plot the result (must be run in Mantid), default is true
    """
    _printMessage('CompWavRanges( %s,plot=%s)'%(str(wavelens),plot))

    #this only makes sense for 1D reductions
    if ReductionSingleton().to_Q.output_type == '2D':
        issueWarning('This wave ranges check is a 1D analysis, ignoring 2D setting')
        _printMessage('Set1D()')
        ReductionSingleton().to_Q.output_type = '1D'

    if type(wavelens) != type([]) or len(wavelens) < 2:
        if type(wavelens) != type((1,)):
            raise RuntimeError('Error CompWavRanges() requires a list of wavelengths between which reductions will be performed.')

    try:
        # Full wavelength range is reduced first.
        ReductionSingleton().to_wavelen.set_rebin(w_low=wavelens[0], w_high=wavelens[len(wavelens)-1])
        #run the reductions, calculated will be an array with the names of all the workspaces produced
        calculated = [ReductionSingleton()._reduce()]
        for i in range(0, len(wavelens)-1):
            # NOTE(review): a fresh singleton is installed before each slice —
            # presumably _reduce() consumes singleton state; confirm before
            # reordering these calls.
            ReductionSingleton.replace(ReductionSingleton().settings())
            ReductionSingleton().to_wavelen.set_rebin(w_low=wavelens[i], w_high=wavelens[i+1])
            calculated.append(ReductionSingleton()._reduce())
    finally:
        _refresh_singleton()

    if plot:
        mantidplot.plotSpectrum(calculated, 0)

    #return just the workspace name of the full range
    return calculated[0]
def update(self):
    """ Update data members according to reduction results """
    if IS_IN_MANTIDPLOT:
        from reduction_workflow.command_interface import ReductionSingleton
        self.log_text = ReductionSingleton().log_text
        try:
            if hasattr(ReductionSingleton(), "output_workspaces") \
                    and len(ReductionSingleton().output_workspaces)>0:
                for item in ReductionSingleton().output_workspaces:
                    mantidplot.plotSpectrum(item, 0, True)
            else:
                # No declared outputs: fall back to any I(q) workspaces in the
                # ADS named after the input data files.
                iq_plots = []
                for item in ReductionSingleton()._data_files.keys():
                    for ws in AnalysisDataService.Instance().getObjectNames():
                        if ws.startswith(item) and ws.endswith('_Iq'):
                            iq_plots.append(ws)
                if len(iq_plots) > 0:
                    mantidplot.plotSpectrum(iq_plots, 0, True)
        except:
            # NOTE(review): Python 2-only raise form and sys.exc_value; needs
            # porting before this module can run under Python 3.
            raise RuntimeError, "Could not plot resulting output\n %s" % sys.exc_value
def PyExec(self):
    """Run the deconvolution corrections, optionally plot the intermediate
    and final workspaces, and save the results as NeXus and/or ASCII."""
    import mantidplot as mp
    # NOTE(review): 'workdir' is never used in this method — confirm before removing.
    workdir = config['defaultsave.directory']
    self._setup()
    # Apply the deconvolution corrections to the theta workspace.
    DeconApplyCorrections(DataWorkspace=self._stheta,
                          DerivativesWorkspace=self._deriv,
                          MomentsWorkspace=self._mome,
                          UseSmoothData=self._smooth,
                          NumberTerms=self._nterms,
                          Cutoff=self._cutoff,
                          CutoffPoint=self._cutoff_pt)
    # Transfer the corrected result onto the Q workspace (no rebinning).
    DeconD4Result(CorrectedWorkspace=self._stheta + '_corrected',
                  QWorkspace=self._sofq,
                  RebinOption='None',
                  RebinQrange=self._rebin_qrange,
                  RebinQinc=self._rebin_qinc)
    if self._plot:
        mp.plotSpectrum(self._stheta + '_coeff', [0,1,2,3])
        mp.plotSpectrum(self._stheta + '_corr', [0,1,2,3])
        # Overlay the result spectra with the input that was used.
        result_graph=mp.plotSpectrum(self._stheta + '_result', [0,1,2,3], False)
        mp.mergePlots(result_graph,mp.plotSpectrum(self._stheta + '_used', 0, False))
        self._plot_result([self._stheta + '_used', self._stheta + '_corrected'], 0)
        # Final Q plot: corrected vs original S(Q), x-axis labelled 'Q'.
        finalQ_graph=mp.plotSpectrum(self._sofq + '_corrected', 0, False)
        finalQ_layer = finalQ_graph.activeLayer()
        finalQ_layer.setAxisTitle(mp.Layer.Bottom, 'Q')
        mp.mergePlots(finalQ_graph,mp.plotSpectrum(self._sofq, 0, False))
    if self._saveNXS:
        save_nxs_prog = Progress(self, start=0.0, end=0.8, nreports=4)
        save_nxs_prog.report('Save NXS ')
        self._save_ws(self._stheta + '_coeff', 'Coefficients')
        self._save_ws(self._stheta + '_corr', 'Corrections')
        self._save_ws(self._stheta + '_result', 'Result')
        self._save_ws(self._stheta + '_corrected', 'Final theta corrected')
        self._save_ws(self._sofq + '_corrected', 'Final Q corrected')
        save_nxs_prog.report('Save NXS completed')
    if self._saveAscii:
        save_ascii_prog = Progress(self, start=0.0, end=0.8, nreports=3)
        save_ascii_prog.report('Save ascii ')
        self._save_Ascii(self._stheta + '_corrected', '.stc')
        self._save_Ascii(self._sofq + '_corrected', '.sqc')
        save_ascii_prog.report('Save ascii completed')
def plot(self, i=0, workspace=None, ws_index=0, name=None):
    """Plot a spectrum. Parameters are the same as in getSpectrum(...)"""
    from mantidplot import plotSpectrum
    from mantid.api import AlgorithmManager
    ws_maker = AlgorithmManager.createUnmanaged('CreateWorkspace')
    ws_maker.initialize()
    xArray, yArray = self.getSpectrum(i, workspace, ws_index)
    ws_name = 'CrystalField_%s' % self._ion if name is None else name

    if not isinstance(i, int):
        # Non-integer spectrum id: single plot, no window bookkeeping.
        ws_name += '_%s' % i
        ws_maker.setProperty('DataX', xArray)
        ws_maker.setProperty('DataY', yArray)
        ws_maker.setProperty('OutputWorkspace', ws_name)
        ws_maker.execute()
        plotSpectrum(ws_name, 0)
        return

    if workspace is None:
        if i > 0:
            ws_name += '_%s' % i
        ws_maker.setProperty('DataX', xArray)
        ws_maker.setProperty('DataY', yArray)
        ws_maker.setProperty('OutputWorkspace', ws_name)
        ws_maker.execute()
        # Reuse and clear any window previously opened for this spectrum index.
        previous = self._plot_window[i] if i in self._plot_window else None
        self._plot_window[i] = plotSpectrum(ws_name, 0, window=previous, clearWindow=True)
    else:
        ws_name += '_%s' % workspace
        if i > 0:
            ws_name += '_%s' % i
        ws_maker.setProperty('DataX', xArray)
        ws_maker.setProperty('DataY', yArray)
        ws_maker.setProperty('OutputWorkspace', ws_name)
        ws_maker.execute()
        plotSpectrum(ws_name, 0)
def connect(self, ws, call_back, xmin=None, xmax=None, range_min=None, range_max=None, x_title=None, log_scale=False, ws_output_base=None):
    """Show *ws* in the named graph window and hook MantidPlot's
    x_range_update signal to *call_back* so the user can pick an x-range.

    @param ws: workspace(s) to plot; ws[0] is used for the curve title
    @param call_back: slot invoked on x_range_update(double,double)
    @param xmin/xmax: optional initial x-axis scale limits
    @param range_min/range_max: optional initial selection-marker positions
    @param x_title: optional x-axis title
    @param log_scale: when True use log-y / linear-x axes
    @param ws_output_base: stored on the instance for later use
    """
    if not IS_IN_MANTIDPLOT:
        print "RangeSelector cannot be used output MantidPlot"
        return

    self._call_back = call_back
    self._ws_output_base = ws_output_base

    mantidplot.app.connect(
        mantidplot.app.mantidUI,
        QtCore.SIGNAL("x_range_update(double,double)"),
        self._call_back)
    g = mantidplot.graph(self._graph)

    # Close any stale window with the same name before re-plotting.
    if g is not None:
        g.close()

    g = mantidplot.plotSpectrum(ws, [0], True)
    g.setName(self._graph)

    l = g.activeLayer()
    try:
        title = ws[0].replace("_", " ")
        # NOTE(review): strip() returns a new string which is discarded here,
        # so surrounding whitespace is kept — confirm intent.
        title.strip()
    except:
        title = " "
    l.setTitle(" ")
    l.setCurveTitle(0, title)
    if log_scale:
        l.logYlinX()
    if x_title is not None:
        l.setXTitle(x_title)
    if xmin is not None and xmax is not None:
        l.setScale(2, xmin, xmax)

    if range_min is not None and range_max is not None:
        mantidplot.selectMultiPeak(g, False, range_min, range_max)
    else:
        mantidplot.selectMultiPeak(g, False)
def connect(self, ws, call_back, xmin=None, xmax=None, range_min=None, range_max=None, x_title=None, log_scale=False, ws_output_base=None):
    """Plot *ws* in the named graph window and wire MantidPlot's
    x_range_update(double,double) signal to *call_back* for range picking."""
    if not IS_IN_MANTIDPLOT:
        print "RangeSelector cannot be used output MantidPlot"
        return

    self._call_back = call_back
    self._ws_output_base = ws_output_base

    mantidplot.app.connect(mantidplot.app.mantidUI,
                           QtCore.SIGNAL("x_range_update(double,double)"),
                           self._call_back)
    g = mantidplot.graph(self._graph)

    # Close any stale window with the same name before re-plotting.
    if g is not None:
        g.close()

    g = mantidplot.plotSpectrum(ws, [0], True)
    g.setName(self._graph)

    l=g.activeLayer()
    try:
        title = ws[0].replace("_"," ")
        # NOTE(review): strip() result is discarded — confirm intent.
        title.strip()
    except:
        title = " "
    l.setTitle(" ")
    l.setCurveTitle(0, title)
    if log_scale:
        l.logYlinX()
    if x_title is not None:
        l.setXTitle(x_title)
    if xmin is not None and xmax is not None:
        l.setScale(2,xmin,xmax)

    if range_min is not None and range_max is not None:
        mantidplot.selectMultiPeak(g, False, range_min, range_max)
    else:
        mantidplot.selectMultiPeak(g, False)
def plot_result(self):
    """Plot the scaled data sets, one graph window per polarization state;
    windows that already exist are left untouched."""
    pol_dict = {"Off Off": ReflData.OFF_OFF,
                "On Off": ReflData.ON_OFF,
                "Off On": ReflData.OFF_ON,
                "On On": ReflData.ON_ON}
    for pol, pol_state in pol_dict.items():
        stitcher = Stitcher()
        ws_list = []
        for item in self._workspace_list:
            data_set = item.get_user_data(pol_state)
            if data_set is None:
                continue
            ws_list.append(data_set.get_scaled_ws())
            stitcher.append(data_set)
        if not ws_list:
            continue

        combined_ws = "ref_%s" % pol.replace(" ", "_")
        if self._settings.instrument_name == "REFL":
            combined_ws = "ref_combined"
        stitcher.get_scaled_data(combined_ws)

        plot_name = '%s: %s' % (self._graph, pol)
        if mantidplot.graph(plot_name) is not None:
            # Window already open for this state; leave it as-is.
            continue
        graph = mantidplot.plotSpectrum(ws_list, [0], True)
        graph.setName(plot_name)
        layer = graph.activeLayer()
        layer.logYlinX()
        if self._settings.instrument_name == "REFL":
            layer.setTitle("Reflectivity")
        else:
            layer.setTitle("Polarization state: %s" % pol)
def PlotResult(workspace, canvas=None):
    """
    Draws a graph of the passed workspace. If the workspace is 2D (has many
    spectra) a contour plot is written.

    @param workspace: a workspace name or handle to plot
    @param canvas: optional handle to an existing graph to write the plot to
    @return: a handle to the graph that was written to
    """
    try:
        import mantidplot
        resolved = AnalysisDataService.retrieve(str(workspace))
        # For a workspace group, look at the first member to count spectra.
        if isinstance(resolved, WorkspaceGroup):
            n_spectra = resolved[0].getNumberHistograms()
        else:
            n_spectra = resolved.getNumberHistograms()
        # Single spectrum -> 1D line plot; otherwise a 2D contour plot.
        if n_spectra == 1:
            graph = mantidplot.plotSpectrum(resolved, 0)
        else:
            graph = mantidplot.importMatrixWorkspace(resolved.getName()).plotGraph2D()
        if canvas is not None:
            # we were given a handle to an existing graph, use it
            mantidplot.mergePlots(canvas, graph)
            graph = canvas
        return graph
    except ImportError:
        print_message('Plot functions are not available, is this being run from outside Mantidplot?')
def plot_result(self):
    """
    Plot the scaled data sets, one log-y graph per polarization state.

    For each state, gather the scaled workspaces of every entry in the
    workspace list, combine them through the Stitcher, and plot them —
    unless a graph of that name is already open, in which case it is
    left untouched (it updates automatically).
    """
    pol_states = {
        "Off Off": ReflData.OFF_OFF,
        "On Off": ReflData.ON_OFF,
        "Off On": ReflData.OFF_ON,
        "On On": ReflData.ON_ON,
    }
    # Iterate items() directly instead of keys() + repeated dict lookup.
    for pol, pol_state in pol_states.items():
        stitcher = Stitcher()
        ws_list = []
        for item in self._workspace_list:
            d = item.get_user_data(pol_state)
            if d is not None:
                ws_list.append(d.get_scaled_ws())
                stitcher.append(d)
        if not ws_list:
            continue
        combined_ws = "ref_%s" % pol.replace(" ", "_")
        # REFL has a single combined output regardless of state.
        if self._settings.instrument_name == "REFL":
            combined_ws = "ref_combined"
        stitcher.get_scaled_data(combined_ws)
        plot_name = '%s: %s' % (self._graph, pol)
        g = mantidplot.graph(plot_name)
        if g is not None:
            # Graph already exists; skip re-plotting.
            continue
        g = mantidplot.plotSpectrum(ws_list, [0], True)
        g.setName(plot_name)
        layer = g.activeLayer()
        layer.logYlinX()
        if self._settings.instrument_name == "REFL":
            layer.setTitle("Reflectivity")
        else:
            layer.setTitle("Polarization state: %s" % pol)
def _plot_result(self):
    """Plot every elastic-window-scan output spectrum, in the same order as before."""
    import mantidplot as mp
    # One plot per scan output; suffixes listed in the original call order.
    for suffix in ('_el_eq1', '_inel_eq1', '_total_eq1',
                   '_el_eq2', '_inel_eq2', '_total_eq2', '_eisf'):
        mp.plotSpectrum(self._scan_ws + suffix, 0, error_bars=True)
    if self._msdfit:
        mp.plotSpectrum(self._scan_ws + '_msd', 1, error_bars=True)
    if self._widthfit:
        mp.plotSpectrum(self._output_ws + '_Diffusion', 0, error_bars=True)
def PyExec(self):
    # Reduce interleaved sample runs with EnergyWindowScan and, if requested,
    # plot/save the elastic-window outputs and fit a Lorentzian width to each
    # reduced spectrum via IndirectTwoPeakFit.
    from IndirectImport import import_mantidplot
    mp = import_mantidplot()
    workdir = config['defaultsave.directory']  # NOTE(review): unused in this method
    # self._setup()
    q2_workspaces = []
    scan_alg = self.createChildAlgorithm("EnergyWindowScan", 0.05, 0.95)
    for numb in range(self._number_samples):
        run_numbers = []
        run_names = []
        # Runs for sample `numb` are interleaved every `_number_samples` runs.
        first_run = self._run_first + numb
        for idx in range(int(self._number_runs)):
            run = str(first_run + idx * self._number_samples)
            run_numbers.append(run)
            run_names.append(self._instrument + run)
        # e.g. "<instr><first>_to_<last>_s<numb>"
        q0 = self._instrument.lower() + run_numbers[0] + '_to_' + run_numbers[-1] + '_s' + str(numb)
        output_ws = q0 + '_red'
        scan_ws = q0 + '_scan'
        scan_alg.setProperty('InputFiles', run_names)
        scan_alg.setProperty('LoadLogFiles', True)
        scan_alg.setProperty('CalibrationWorkspace', '')
        scan_alg.setProperty('Instrument', self._instrument_name)
        scan_alg.setProperty('Analyser', self._analyser)
        scan_alg.setProperty('Reflection', self._reflection)
        scan_alg.setProperty('SpectraRange', self._spectra_range)
        scan_alg.setProperty('ElasticRange', self._elastic_range)
        scan_alg.setProperty('InelasticRange', self._inelastic_range)
        scan_alg.setProperty('TotalRange', self._total_range)
        scan_alg.setProperty('DetailedBalance', Property.EMPTY_DBL)
        scan_alg.setProperty('GroupingMethod', 'Individual')
        scan_alg.setProperty('SampleEnvironmentLogName', self._sample_log_name)
        scan_alg.setProperty('SampleEnvironmentLogValue', self._sample_log_value)
        scan_alg.setProperty('msdFit', self._msdfit)
        scan_alg.setProperty('ReducedWorkspace', output_ws)
        scan_alg.setProperty('ScanWorkspace', scan_ws)
        scan_alg.execute()
        logger.information('OutputWorkspace : %s' % output_ws)
        logger.information('ScanWorkspace : %s' % scan_ws)
        # Names of the workspaces EnergyWindowScan produces for this sample.
        q1_ws = scan_ws + '_el_eq1'
        q2_ws = scan_ws + '_el_eq2'
        q2_workspaces.append(q2_ws)
        eisf_ws = scan_ws + '_eisf'
        el_elt_ws = scan_ws + '_el_elt'
        inel_elt_ws = scan_ws + '_inel_elt'
        tot_elt_ws = scan_ws + '_total_elt'
        msd_ws = scan_ws + '_msd'
        # output_workspaces = [q1_ws, q2_ws, eisf_ws, el_elt_ws, inel_elt_ws, tot_elt_ws]
        output_workspaces = [q1_ws, eisf_ws, el_elt_ws, inel_elt_ws, tot_elt_ws]
        if self._msdfit:
            output_workspaces.append(msd_ws)
        if self._plot:
            for ws in output_workspaces:
                mp.plotSpectrum(ws, 0, error_bars=True)
        if self._save:
            self._save_output(output_workspaces)
        # NOTE(review): placement of this width-fit section inside the sample
        # loop (it reads the per-sample `output_ws` and `run_numbers`) should
        # be confirmed against the original file's indentation.
        if self._widthfit:
            result_workspaces = list()
            chi_workspaces = list()
            temperatures = list()
            # Get input workspaces
            fit_progress = Progress(self, 0.0, 0.05, 3)
            input_workspace_names = mtd[output_ws].getNames()
            x = mtd[input_workspace_names[0]].readX(0)
            xmin = x[0]
            xmax = x[len(x) - 1]
            for input_ws in input_workspace_names:
                # "<name>_sqw" -> "<name>_red" style rename of the suffix.
                red_ws = input_ws[:-3] + 'red'
                # Get the sample temperature
                temp = self._get_temperature(red_ws)
                if temp is not None:
                    temperatures.append(temp)
                else:
                    # Get the run number
                    run_no = self._get_InstrRun(input_ws)[1]
                    run_numbers.append(run_no)
                num_hist = mtd[input_ws].getNumberHistograms()
                logger.information('Reduced histograms : %i' % num_hist)
                result = input_ws[:-3] + 'fit'
                func = 'name=Lorentzian,Amplitude=1.0,PeakCentre=0.0,FWHM=0.01'
                func += ',constraint=(Amplitude>0.0,FWHM>0.0)'
                for idx in range(num_hist):
                    fit_progress.report('Fitting workspace: %s ; spectrum %i' % (input_ws, idx))
                    # NOTE(review): whether this fit call sits inside the
                    # per-spectrum loop is ambiguous in the mangled source.
                    IndirectTwoPeakFit(SampleWorkspace=input_ws,
                                       EnergyMin=xmin,
                                       EnergyMax=xmax,
                                       Minimizer='Levenberg-Marquardt',
                                       MaxIterations=500,
                                       OutputName=result)
                result_workspaces.append(result + '_Result')
                chi_workspaces.append(result + '_ChiSq')
                if self._plot:
                    mp.plotSpectrum(result + '_ChiSq', [0, 1], error_bars=False)
                    mp.plotSpectrum(result + '_Result', 0, error_bars=True)
def _plot_result(self):
    """Plot every elastic-window-scan output spectrum, in the same order as before."""
    import mantidplot as mp
    # One plot per scan output; suffixes listed in the original call order.
    for suffix in ('_el_eq1', '_inel_eq1', '_el_eq2', '_inel_eq2', '_eisf'):
        mp.plotSpectrum(self._scan_ws + suffix, 0, error_bars=True)
    if self._msdfit:
        mp.plotSpectrum(self._scan_ws + '_msd', 1, error_bars=True)
def plot_graph(workspace):
    """Plot spectrum 0 of *workspace*; return the graph handle, or None
    when MantidPlot is unavailable."""
    if not mantidplot:
        return None
    return mantidplot.plotSpectrum(workspace, 0)
def FindBeamCentre(rlow, rupp, MaxIter = 10, xstart = None, ystart = None):
    """
        Estimates the location of the effective beam centre given a good initial estimate. For more
        information go to this page
        mantidproject.org/Using_the_SANS_GUI_Beam_Centre_Finder
        @param rlow: mask around the (estimated) centre to this radius (in millimetres)
        @param rupp: don't include further out than this distance (mm) from the centre point
        @param MaxInter: don't calculate more than this number of iterations (default = 10)
        @param xstart: initial guess for the horizontal distance of the beam centre from the detector centre in meters (default the values in the mask file)
        @param ystart: initial guess for the distance of the beam centre from the detector centre vertically in metres (default the values in the mask file)
        @return: the best guess for the beam centre point
    """
    # Both search steps start at the instrument's configured centre-finding step.
    XSTEP = ReductionSingleton().inst.cen_find_step
    YSTEP = ReductionSingleton().inst.cen_find_step
    # Remember the detector position so residuals can be measured against it.
    original = ReductionSingleton().get_instrument().cur_detector_position(ReductionSingleton().get_sample().get_wksp_name())
    if ReductionSingleton().instrument.lowAngDetSet:
        det_bank = 'rear'
    else:
        det_bank = 'front'
    if xstart or ystart:
        # Seed the beam finder with the user-supplied initial guess.
        ReductionSingleton().set_beam_finder(
            sans_reduction_steps.BaseBeamFinder(
                float(xstart), float(ystart)), det_bank)
    beamcoords = ReductionSingleton().get_beam_center()
    XNEW = beamcoords[0]
    YNEW = beamcoords[1]
    xstart = beamcoords[0]
    ystart = beamcoords[1]
    mantid.sendLogMessage("::SANS:: xstart,ystart="+str(XNEW*1000.)+" "+str(YNEW*1000.))
    _printMessage("Starting centre finding routine ...")
    # remove this if we know running the Reducer() doesn't change i.e.
    # all execute() methods are const
    centre_reduction = copy.deepcopy(ReductionSingleton().reference())
    LimitsR(str(float(rlow)), str(float(rupp)), quiet=True, reducer=centre_reduction)
    centre = CentreFinder(original)
    # this function moves the detector to the beam center positions defined above and
    # returns an estimate of where the beam center is relative to the new center
    resX_old, resY_old = centre.SeekCentre(centre_reduction, [XNEW, YNEW])
    # A fresh deep copy per trial keeps each reduction independent.
    centre_reduction = copy.deepcopy(ReductionSingleton().reference())
    LimitsR(str(float(rlow)), str(float(rupp)), quiet=True, reducer=centre_reduction)
    mantid.sendLogMessage(centre.status_str(0, resX_old, resY_old))
    # take first trial step
    XNEW = xstart + XSTEP
    YNEW = ystart + YSTEP
    graph_handle = None
    for i in range(1, MaxIter+1):
        it = i
        centre_reduction.set_beam_finder(
            sans_reduction_steps.BaseBeamFinder(XNEW, YNEW), det_bank)
        resX, resY = centre.SeekCentre(centre_reduction, [XNEW, YNEW])
        centre_reduction = copy.deepcopy(ReductionSingleton().reference())
        LimitsR(str(float(rlow)), str(float(rupp)), quiet=True, reducer=centre_reduction)
        mantid.sendLogMessage(centre.status_str(it, resX, resY))
        try :
            if not graph_handle:
                # once we have a plot it will be updated automatically when the workspaces are updated
                graph_handle = mantidplot.plotSpectrum(centre.QUADS, 0)
            graph_handle.activeLayer().setTitle(
                centre.status_str(it, resX, resY))
        except :
            # if plotting is not available it probably means we are running outside
            # a GUI, in which case do everything but don't plot
            pass
        # have we stepped across the y-axis that goes through the beam center?
        if resX > resX_old:
            # yes with stepped across the middle, reverse direction and half the step size
            XSTEP = -XSTEP/2.
        if resY > resY_old:
            YSTEP = -YSTEP/2.
        # Step threshold of 0.1251 mm (expressed in metres) on both axes.
        if abs(XSTEP) < 0.1251/1000. and abs(YSTEP) < 0.1251/1000. :
            # this is the success criteria, we've close enough to the center
            mantid.sendLogMessage("::SANS:: Converged - check if stuck in local minimum!")
            break
        resX_old = resX
        resY_old = resY
        XNEW += XSTEP
        YNEW += YSTEP
    if it == MaxIter:
        # Ran out of iterations: back off the last (unverified) step.
        mantid.sendLogMessage("::SANS:: Out of iterations, new coordinates may not be the best!")
        XNEW -= XSTEP
        YNEW -= YSTEP
    # Persist the best centre found for subsequent reductions.
    ReductionSingleton().set_beam_finder(
        sans_reduction_steps.BaseBeamFinder(XNEW, YNEW), det_bank)
    _printMessage("Centre coordinates updated: [" + str(XNEW)+ ", "+ str(YNEW) + ']')
    return XNEW, YNEW
def PyExec(self):
    """ Alg execution.

    Loads a muon run plus groupings/phases/deadtimes, runs the Fortran
    MaxEnt routine via callbacks, writes result files, and builds the
    output workspaces and tables (grouped under the run name).
    """
    instrument = self.getProperty(INSTRUMENT_PROP).value
    run_number = self.getProperty(RUN_NUM_PROP).value
    fit_deadtime = self.getProperty(FIT_DEADTIME_PROP).value
    fix_phases = self.getProperty(FIX_PHASES_PROP).value
    default_level = self.getProperty(DEFAULT_LEVEL).value
    sigma_looseness = self.getProperty(SIGMA_LOOSENESS_PROP).value
    groupings_file = self.getProperty(GROUPINGS_PROP).value
    in_phases_file = self.getProperty(PHASES_PROP).value
    in_deadtimes_file = self.getProperty(DEADTIMES_PROP).value
    out_phases_file = self.getProperty(PHASES_RESULT_PROP).value
    out_deadtimes_file = self.getProperty(DEADTIMES_RESULT_PROP).value
    isis = config.getFacility('ISIS')
    # Zero-pad the run number to the instrument's convention, e.g. MUSR00015189.
    padding = isis.instrument(instrument).zeroPadding(0)
    run_name = instrument + str(run_number).zfill(padding)
    try:
        run_number = int(run_number)
    except:
        raise RuntimeError("'%s' is not an integer run number." % run_number)
    try:
        run_file_path = FileFinder.findRuns(run_name)[0]
    except:
        raise RuntimeError("Unable to find file for run %i" % run_number)
    if groupings_file == "":
        groupings_file = DEFAULT_GROUPINGS_FILENAME % instrument
    # Load data and other info from input files.

    def temp_hidden_ws_name():
        """Generate a unique name for a temporary, hidden workspace."""
        selection = string.ascii_lowercase + string.ascii_uppercase + string.digits
        return '__temp_MaxEnt_' + ''.join(
            random.choice(selection) for _ in range(20))

    input_data_ws_name = temp_hidden_ws_name()
    LoadMuonNexus(Filename=run_file_path,
                  OutputWorkspace=input_data_ws_name)
    input_data_ws = mtd[input_data_ws_name]
    if isinstance(input_data_ws, WorkspaceGroup):
        Logger.get("MaxEnt").warning(
            "Multi-period data is not currently supported. Just using first period."
        )
        input_data_ws = input_data_ws[0]
    groupings_ws_name = temp_hidden_ws_name()
    LoadDetectorsGroupingFile(InputFile=groupings_file,
                              OutputWorkspace=groupings_ws_name)
    groupings_ws = mtd[groupings_ws_name]

    def yield_floats_from_file(path):
        """Given a path to a file with a float on each line, will return the
        floats one at a time.  Throws otherwise.  Strips whitespace and
        ignores empty lines."""
        with open(path, 'r') as f:
            for i, line in enumerate(line.strip() for line in f):
                if line == "":
                    continue
                try:
                    yield float(line)
                except:
                    raise RuntimeError(
                        "Parsing error in '%s': Line %d: '%s'." %
                        (path, i, line))

    input_phases = np.array(list(yield_floats_from_file(in_phases_file)))
    input_phases_size = len(input_phases)
    input_deadtimes = np.array(
        list(yield_floats_from_file(in_deadtimes_file)))
    input_deadtimes_size = len(input_deadtimes)
    n_bins = input_data_ws.blocksize()
    n_detectors = input_data_ws.getNumberHistograms()

    def time_value_to_time_channel_index(value):
        """Given a time value, will return the index of the time channel in
        which the value falls."""
        # Assumes uniform binning — bin width taken from the first two X values.
        bin_width = input_data_ws.readX(0)[1] - input_data_ws.readX(0)[0]
        diff = value - input_data_ws.readX(0)[0]
        return int(diff / bin_width)

    # Mantid corrects for time zero on loading, so we want to find the actual channels
    # where 0.0 occurs, and where we have values of 0.1 onwards.
    time_zero_channel = time_value_to_time_channel_index(0.0)
    first_good_channel = time_value_to_time_channel_index(0.1)
    input_data = np.concatenate(
        [input_data_ws.readY(i) for i in range(n_detectors)])
    groupings = [
        groupings_ws.readY(row)[0]
        for row in range(groupings_ws.getNumberHistograms())
    ]
    # NOTE(review): under Python 3, map() returns a one-shot iterator that the
    # set() below would exhaust before `groupings` is padded later — this code
    # relies on Python 2 list semantics; confirm before porting.
    groupings = map(int, groupings)
    n_groups = len(set(groupings))
    # Cleanup.
    input_data_ws.delete()
    groupings_ws.delete()
    # We're faced with the problem of providing more than a dozen parameters to
    # the Fortran, which can be a bit messy (especially on the Fortran side of
    # things where we need to make "Cf2py" declarations).  A cleaner way of
    # doing this is to simply pass in a few callbacks -- one for each input
    # type -- and have the Fortran provide the name of the variable it wants
    # to the callback.  The callback will then look up the corresponding value
    # and feed it back to the Fortran.
    #
    # We also have a callback for printing to the results log.
    self.int_vars = {
        "RunNo": run_number,
        "frames": FRAMES,
        "res": RES,
        "Tzeroch": time_zero_channel,
        "firstgoodch": first_good_channel,
        "ptstofit": POINTS_TO_FIT,
        "histolen": n_bins,
        "nhisto": n_detectors,
        "n_groups": n_groups,
    }
    self.float_vars = {
        "deflevel": default_level,
        "sigloose": sigma_looseness,
    }
    self.bool_vars = {
        "fixphase": fix_phases,
        "fitdt": fit_deadtime,
    }
    self._assert_map_values_are_of_expected_type()

    def lookup(par_name, par_map, default):
        """The basis of the callbacks passed to the Fortran.  Given a
        parameter name it will consult the appropriate variable map, and
        return the corresponding value of the parameter.  Else return a
        default and log a warning if a parameter with the name does not
        exist."""
        par_name = par_name.strip()
        if par_name in par_map:
            return par_map[par_name]
        msg = """WARNING: tried to find a value for parameter with name %s but could not find one. Default of \"%s\" provided.""" % (par_name, default)
        Logger.get("MaxEnt").warning(msg)
        return default

    def log(priority, message):
        """Log the given message with given priority."""
        try:
            logger = getattr(Logger.get("MaxEnt"), priority.lower())
        except AttributeError:
            # If we don't recognise the priority, use warning() as a default.
            logger = getattr(Logger.get("MaxEnt"), "warning")
        logger(message)
        return True

    # The Fortran expects arrays to be of a certain size, so any arrays that
    # aren't big enough need to be padded.
    input_phases = self._pad_to_length_with_zeros(input_phases, MAX_HISTOS)
    input_deadtimes = self._pad_to_length_with_zeros(
        input_deadtimes, MAX_HISTOS)
    input_data = self._pad_to_length_with_zeros(input_data,
                                                MAX_INPUT_DATA_SIZE)
    groupings = self._pad_to_length_with_zeros(groupings, MAX_HISTOS)
    # TODO: Return the contents of "NNNNN.max", instead of writing to file.
    f_out, fchan_out, output_deadtimes, output_phases, chi_sq = maxent.mantid_maxent(
        # Input data and other info:
        input_data,
        groupings,
        input_deadtimes,
        input_phases,
        # Variable-lookup callbacks:
        lambda par_name: lookup(par_name, self.int_vars, 0),
        lambda par_name: lookup(par_name, self.float_vars, 0.0),
        lambda par_name: lookup(par_name, self.bool_vars, False),
        # Callback for logging:
        log)

    def write_items_to_file(path, items):
        """Given a path to a file and a list of items, will write the items
        to the file, one on each line."""
        with open(path, 'w') as f:
            for item in items:
                f.write(str(item) + "\n")

    # Chop the padded outputs back down to the correct size.
    output_phases = output_phases[:input_phases_size]
    output_deadtimes = output_deadtimes[:input_deadtimes_size]
    input_phases = input_phases[:input_phases_size]
    input_deadtimes = input_deadtimes[:input_deadtimes_size]
    fchan_out = fchan_out[:n_bins]
    f_out = f_out[:n_bins]
    write_items_to_file(out_phases_file, output_phases)
    write_items_to_file(out_deadtimes_file, output_deadtimes)
    log_output = "\nDead times in:\n" + str(input_deadtimes) + "\n" +\
                 "\nDead times out:\n" + str(output_deadtimes) + "\n" +\
                 "\nPhases in:\n" + str(input_phases) + "\n" +\
                 "\nPhases out:\n" + str(output_phases) + "\n" + \
                 "\nGroupings:\n" + str(groupings) + "\n" +\
                 "\nChi Squared:\n" + str(chi_sq) + "\n" +\
                 "\nInput variables:\n"
    for type_map in self.int_vars, self.float_vars, self.bool_vars:
        for name, value in type_map.items():
            log_output += str(name) + " = " + str(value) + "\n"
    Logger.get("MaxEnt").notice(log_output)
    # Generate our own output ws name if the user has not provided one.
    out_ws_name = self.getPropertyValue(OUT_WS_PROP)
    if out_ws_name == "":
        out_ws_name = run_name + "; MaxEnt"
        self.setPropertyValue(OUT_WS_PROP, out_ws_name)
    out_ws = CreateWorkspace(OutputWorkspace=out_ws_name,
                             DataX=fchan_out[:n_bins],
                             DataY=f_out[:n_bins])
    self.setProperty(OUT_WS_PROP, out_ws)
    # MaxEnt inputs table.
    input_table_name = run_name + "; MaxEnt Input"
    input_table = CreateEmptyTableWorkspace(
        OutputWorkspace=input_table_name)
    input_table.addColumn("str", "Name")
    input_table.addColumn("str", "Value")
    inputs = itertools.chain(self.int_vars.items(),
                             self.float_vars.items(),
                             self.bool_vars.items())
    for name, value in inputs:
        input_table.addRow([str(name), str(value)])
    # Deadtimes and phases input/output table.
    dead_phases_table_name = run_name + "; MaxEnt Deadtimes & Phases"
    dead_phases_table = CreateEmptyTableWorkspace(
        OutputWorkspace=dead_phases_table_name)
    for column_name in "Deadtimes In", "Deadtimes Out", "Phases In", "Phases Out":
        dead_phases_table.addColumn("double", column_name)
    for row in zip(input_deadtimes, output_deadtimes, input_phases,
                   output_phases):
        dead_phases_table.addRow(list(map(float, row)))
    # Chi-squared output table.
    chisq_table_name = run_name + "; MaxEnt Chi^2"
    chisq_table = CreateEmptyTableWorkspace(
        OutputWorkspace=chisq_table_name)
    chisq_table.addColumn("int", "Cycle")
    for iteration in range(10):
        chisq_table.addColumn("double", "Iter " + str(iteration + 1))
    for cycle, data in enumerate(chi_sq):
        chisq_table.addRow([cycle + 1] + list(map(float, data)))
    all_output_ws = [
        input_table_name, dead_phases_table_name, chisq_table_name,
        out_ws_name
    ]
    # The output workspaces of this algorithm belong in the same groups
    # that are created by the muon interface.  If the appropriate group
    # doesn't exist already then it needs to be created.
    if not run_name in mtd:
        GroupWorkspaces(InputWorkspaces=all_output_ws,
                        OutputWorkspace=run_name)
    else:
        group = mtd[run_name]
        for output_ws in all_output_ws:
            if not group.contains(output_ws):
                group.add(output_ws)
    out_ws.getAxis(0).getUnit().setLabel("Field", "G")
    out_ws.setYUnitLabel("P(B)")
    if INSIDE_MANTIDPLOT:
        mantidplot.plotSpectrum(out_ws, 0)
def PyExec(self):
    """ Alg execution.

    Loads a muon run plus groupings/phases/deadtimes, runs the Fortran
    MaxEnt routine via callbacks, writes result files, and builds the
    output workspaces and tables (grouped under the run name).
    """
    instrument = self.getProperty(INSTRUMENT_PROP).value
    run_number = self.getProperty(RUN_NUM_PROP).value
    fit_deadtime = self.getProperty(FIT_DEADTIME_PROP).value
    fix_phases = self.getProperty(FIX_PHASES_PROP).value
    default_level = self.getProperty(DEFAULT_LEVEL).value
    sigma_looseness = self.getProperty(SIGMA_LOOSENESS_PROP).value
    groupings_file = self.getProperty(GROUPINGS_PROP).value
    in_phases_file = self.getProperty(PHASES_PROP).value
    in_deadtimes_file = self.getProperty(DEADTIMES_PROP).value
    out_phases_file = self.getProperty(PHASES_RESULT_PROP).value
    out_deadtimes_file = self.getProperty(DEADTIMES_RESULT_PROP).value
    isis = config.getFacility('ISIS')
    # Zero-pad the run number to the instrument's convention.
    padding = isis.instrument(instrument).zeroPadding(0)
    run_name = instrument + str(run_number).zfill(padding)
    try:
        run_number = int(run_number)
    except:
        raise RuntimeError("'%s' is not an integer run number." % run_number)
    try:
        run_file_path = FileFinder.findRuns(run_name)[0]
    except:
        raise RuntimeError("Unable to find file for run %i" % run_number)
    if groupings_file == "":
        groupings_file = DEFAULT_GROUPINGS_FILENAME % instrument
    # Load data and other info from input files.

    def temp_hidden_ws_name():
        """Generate a unique name for a temporary, hidden workspace."""
        selection = string.ascii_lowercase + string.ascii_uppercase + string.digits
        return '__temp_MaxEnt_' + ''.join(random.choice(selection) for _ in range(20))

    input_data_ws_name = temp_hidden_ws_name()
    LoadMuonNexus(Filename=run_file_path, OutputWorkspace=input_data_ws_name)
    input_data_ws = mtd[input_data_ws_name]
    if isinstance(input_data_ws, WorkspaceGroup):
        Logger.get("MaxEnt").warning("Multi-period data is not currently supported. Just using first period.")
        input_data_ws = input_data_ws[0]
    groupings_ws_name = temp_hidden_ws_name()
    LoadDetectorsGroupingFile(InputFile=groupings_file, OutputWorkspace=groupings_ws_name)
    groupings_ws = mtd[groupings_ws_name]

    def yield_floats_from_file(path):
        """Given a path to a file with a float on each line, will return the
        floats one at a time.  Throws otherwise.  Strips whitespace and
        ignores empty lines."""
        with open(path, 'r') as f:
            for i, line in enumerate(line.strip() for line in f):
                if line == "":
                    continue
                try:
                    yield float(line)
                except:
                    raise RuntimeError("Parsing error in '%s': Line %d: '%s'." % (path, i, line))

    input_phases = np.array(list(yield_floats_from_file(in_phases_file)))
    input_phases_size = len(input_phases)
    input_deadtimes = np.array(list(yield_floats_from_file(in_deadtimes_file)))
    input_deadtimes_size = len(input_deadtimes)
    n_bins = input_data_ws.blocksize()
    n_detectors = input_data_ws.getNumberHistograms()

    def time_value_to_time_channel_index(value):
        """Given a time value, will return the index of the time channel in
        which the value falls."""
        # Assumes uniform binning — bin width taken from the first two X values.
        bin_width = input_data_ws.readX(0)[1] - input_data_ws.readX(0)[0]
        diff = value - input_data_ws.readX(0)[0]
        return int(diff / bin_width)

    # Mantid corrects for time zero on loading, so we want to find the actual channels
    # where 0.0 occurs, and where we have values of 0.1 onwards.
    time_zero_channel = time_value_to_time_channel_index(0.0)
    first_good_channel = time_value_to_time_channel_index(0.1)
    input_data = np.concatenate([input_data_ws.readY(i) for i in range(n_detectors)])
    groupings = [groupings_ws.readY(row)[0] for row in range(groupings_ws.getNumberHistograms())]
    # NOTE(review): under Python 3, map() returns a one-shot iterator that the
    # set() below would exhaust before `groupings` is padded later — this code
    # relies on Python 2 list semantics; confirm before porting.
    groupings = map(int, groupings)
    n_groups = len(set(groupings))
    # Cleanup.
    input_data_ws.delete()
    groupings_ws.delete()
    # We're faced with the problem of providing more than a dozen parameters to
    # the Fortran, which can be a bit messy (especially on the Fortran side of
    # things where we need to make "Cf2py" declarations).  A cleaner way of
    # doing this is to simply pass in a few callbacks -- one for each input
    # type -- and have the Fortran provide the name of the variable it wants
    # to the callback.  The callback will then look up the corresponding value
    # and feed it back to the Fortran.
    #
    # We also have a callback for printing to the results log.
    self.int_vars = {
        "RunNo" : run_number,
        "frames" : FRAMES,
        "res" : RES,
        "Tzeroch" : time_zero_channel,
        "firstgoodch" : first_good_channel,
        "ptstofit" : POINTS_TO_FIT,
        "histolen" : n_bins,
        "nhisto" : n_detectors,
        "n_groups" : n_groups,
    }
    self.float_vars = {
        "deflevel" : default_level,
        "sigloose" : sigma_looseness,
    }
    self.bool_vars = {
        "fixphase" : fix_phases,
        "fitdt" : fit_deadtime,
    }
    self._assert_map_values_are_of_expected_type()

    def lookup(par_name, par_map, default):
        """The basis of the callbacks passed to the Fortran.  Given a
        parameter name it will consult the appropriate variable map, and
        return the corresponding value of the parameter.  Else return a
        default and log a warning if a parameter with the name does not
        exist."""
        par_name = par_name.strip()
        if par_name in par_map:
            return par_map[par_name]
        msg = """WARNING: tried to find a value for parameter with name %s but could not find one. Default of \"%s\" provided.""" % (par_name, default)
        Logger.get("MaxEnt").warning(msg)
        return default

    def log(priority, message):
        """Log the given message with given priority."""
        try:
            logger = getattr(Logger.get("MaxEnt"), priority.lower())
        except AttributeError:
            # If we don't recognise the priority, use warning() as a default.
            logger = getattr(Logger.get("MaxEnt"), "warning")
        logger(message)
        return True

    # The Fortran expects arrays to be of a certain size, so any arrays that
    # aren't big enough need to be padded.
    input_phases = self._pad_to_length_with_zeros(input_phases, MAX_HISTOS)
    input_deadtimes = self._pad_to_length_with_zeros(input_deadtimes, MAX_HISTOS)
    input_data = self._pad_to_length_with_zeros(input_data, MAX_INPUT_DATA_SIZE)
    groupings = self._pad_to_length_with_zeros(groupings, MAX_HISTOS)
    # TODO: Return the contents of "NNNNN.max", instead of writing to file.
    f_out, fchan_out, output_deadtimes, output_phases, chi_sq = maxent.mantid_maxent(
        # Input data and other info:
        input_data,
        groupings,
        input_deadtimes,
        input_phases,
        # Variable-lookup callbacks:
        lambda par_name: lookup(par_name, self.int_vars, 0),
        lambda par_name: lookup(par_name, self.float_vars, 0.0),
        lambda par_name: lookup(par_name, self.bool_vars, False),
        # Callback for logging:
        log
    )

    def write_items_to_file(path, items):
        """Given a path to a file and a list of items, will write the items
        to the file, one on each line."""
        with open(path, 'w') as f:
            for item in items:
                f.write(str(item) + "\n")

    # Chop the padded outputs back down to the correct size.
    output_phases = output_phases[:input_phases_size]
    output_deadtimes = output_deadtimes[:input_deadtimes_size]
    input_phases = input_phases[:input_phases_size]
    input_deadtimes = input_deadtimes[:input_deadtimes_size]
    fchan_out = fchan_out[:n_bins]
    f_out = f_out[:n_bins]
    write_items_to_file(out_phases_file, output_phases)
    write_items_to_file(out_deadtimes_file, output_deadtimes)
    log_output = "\nDead times in:\n" + str(input_deadtimes) + "\n" +\
                 "\nDead times out:\n" + str(output_deadtimes) + "\n" +\
                 "\nPhases in:\n" + str(input_phases) + "\n" +\
                 "\nPhases out:\n" + str(output_phases) + "\n" + \
                 "\nGroupings:\n" + str(groupings) + "\n" +\
                 "\nChi Squared:\n" + str(chi_sq) + "\n" +\
                 "\nInput variables:\n"
    for type_map in self.int_vars, self.float_vars, self.bool_vars:
        for name, value in type_map.items():
            log_output += str(name) + " = " + str(value) + "\n"
    Logger.get("MaxEnt").notice(log_output)
    # Generate our own output ws name if the user has not provided one.
    out_ws_name = self.getPropertyValue(OUT_WS_PROP)
    if out_ws_name == "":
        out_ws_name = run_name + "; MaxEnt"
        self.setPropertyValue(OUT_WS_PROP, out_ws_name)
    out_ws = CreateWorkspace(OutputWorkspace=out_ws_name,
                             DataX=fchan_out[:n_bins],
                             DataY=f_out[:n_bins])
    self.setProperty(OUT_WS_PROP, out_ws)
    # MaxEnt inputs table.
    input_table_name = run_name + "; MaxEnt Input"
    input_table = CreateEmptyTableWorkspace(OutputWorkspace = input_table_name)
    input_table.addColumn("str", "Name")
    input_table.addColumn("str", "Value")
    inputs = itertools.chain(self.int_vars.items(),
                             self.float_vars.items(),
                             self.bool_vars.items())
    for name, value in inputs:
        input_table.addRow([str(name), str(value)])
    # Deadtimes and phases input/output table.
    dead_phases_table_name = run_name + "; MaxEnt Deadtimes & Phases"
    dead_phases_table = CreateEmptyTableWorkspace(OutputWorkspace = dead_phases_table_name)
    for column_name in "Deadtimes In", "Deadtimes Out", "Phases In", "Phases Out":
        dead_phases_table.addColumn("double", column_name)
    for row in zip(input_deadtimes, output_deadtimes, input_phases, output_phases):
        dead_phases_table.addRow(list(map(float, row)))
    # Chi-squared output table.
    chisq_table_name = run_name + "; MaxEnt Chi^2"
    chisq_table = CreateEmptyTableWorkspace(OutputWorkspace = chisq_table_name)
    chisq_table.addColumn("int", "Cycle")
    for iteration in range(10):
        chisq_table.addColumn("double", "Iter " + str(iteration + 1))
    for cycle, data in enumerate(chi_sq):
        chisq_table.addRow([cycle + 1] + list(map(float,data)))
    all_output_ws = [input_table_name,
                     dead_phases_table_name,
                     chisq_table_name,
                     out_ws_name]
    # The output workspaces of this algorithm belong in the same groups
    # that are created by the muon interface.  If the appropriate group
    # doesn't exist already then it needs to be created.
    if not run_name in mtd:
        GroupWorkspaces(InputWorkspaces = all_output_ws,
                        OutputWorkspace = run_name)
    else:
        group = mtd[run_name]
        for output_ws in all_output_ws:
            if not group.contains(output_ws):
                group.add(output_ws)
    out_ws.getAxis(0).getUnit().setLabel("Field", "G")
    out_ws.setYUnitLabel("P(B)")
    if INSIDE_MANTIDPLOT:
        mantidplot.plotSpectrum(out_ws, 0)
def _plot_result(self, ws, list):
    """Display the given spectra of *ws* in MantidPlot.

    NOTE: the second parameter shadows the builtin ``list``; kept for
    interface compatibility with existing callers.
    """
    import mantidplot
    mantidplot.plotSpectrum(ws, list)
def _plot_result(self):
    """Plot the theta workspace (spectra 0-2) and the derivatives workspace (spectra 0-3)."""
    import mantidplot as mp
    spectra_by_ws = ((self._theta_name, [0, 1, 2]),
                     (self._deriv_name, [0, 1, 2, 3]))
    for ws_name, spectra in spectra_by_ws:
        mp.plotSpectrum(ws_name, spectra)