Example #1
def convertToElasticQ(input_ws, output_ws=None):
    """
    Helper function to convert the spectrum axis of a sample to ElasticQ.

    @param input_ws - the name of the workspace to convert from
    @param output_ws - the name to call the converted workspace
    """
    if output_ws is None:
        output_ws = input_ws

    axis = mtd[input_ws].getAxis(1)
    if axis.isSpectra():
        e_fixed = getEfixed(input_ws)
        ConvertSpectrumAxis(input_ws, Target='ElasticQ', EMode='Indirect',
                            EFixed=e_fixed, OutputWorkspace=output_ws)
    elif axis.isNumeric():
        # Check that units are Momentum Transfer
        if axis.getUnit().unitID() != 'MomentumTransfer':
            logger.error('Input must have axis values of Q')
            sys.exit()
        CloneWorkspace(input_ws, OutputWorkspace=output_ws)
    else:
        logger.error('Input workspace must have either spectra or numeric axis.')
        sys.exit()
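A minimal usage sketch for the helper above, assuming the usual Mantid imports (mantid.simpleapi, mtd, logger, sys) are in scope; the workspace name 'sample_red' is a hypothetical placeholder:

# Hypothetical call: 'sample_red' must already exist in the Analysis Data
# Service; the converted copy is written to 'sample_red_q'.
convertToElasticQ('sample_red', output_ws='sample_red_q')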
Example #2
    def PyExec(self):
        from mantid import logger
        from IndirectCommon import StartTime, EndTime

        self._setup()
        StartTime('CreateCalibrationWorkspace')

        runs = []
        for in_file in self._input_files:
            (_, filename) = os.path.split(in_file)
            (root, _) = os.path.splitext(filename)
            try:
                Load(Filename=in_file,
                     OutputWorkspace=root,
                     SpectrumMin=int(self._spec_range[0]),
                     SpectrumMax=int(self._spec_range[1]),
                     LoadLogFiles=False)
                runs.append(root)
            except Exception as exc:
                logger.error('Could not load raw file "%s": %s' %
                             (in_file, str(exc)))

        calib_ws_name = 'calibration'
        if len(runs) > 1:
            MergeRuns(InputWorkspaces=",".join(runs),
                      OutputWorkspace=calib_ws_name)
            factor = 1.0 / len(runs)
            Scale(InputWorkspace=calib_ws_name,
                  OutputWorkspace=calib_ws_name,
                  Factor=factor)
        else:
            calib_ws_name = runs[0]

        CalculateFlatBackground(InputWorkspace=calib_ws_name,
                                OutputWorkspace=calib_ws_name,
                                StartX=self._back_range[0],
                                EndX=self._back_range[1],
                                Mode='Mean')

        from inelastic_indirect_reduction_steps import NormaliseToUnityStep
        ntu = NormaliseToUnityStep()
        ntu.set_factor(self._intensity_scale)
        ntu.set_peak_range(self._peak_range[0], self._peak_range[1])
        ntu.execute(None, calib_ws_name)

        RenameWorkspace(InputWorkspace=calib_ws_name,
                        OutputWorkspace=self._out_ws)

        # Remove old workspaces
        if len(runs) > 1:
            for run in runs:
                DeleteWorkspace(Workspace=run)

        self.setProperty('OutputWorkspace', self._out_ws)
        self._post_process()

        EndTime('CreateCalibrationWorkspace')
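A hypothetical invocation sketch for the algorithm above; the property names and values are assumptions inferred from the self._* attributes used in PyExec, not taken from the algorithm's property declarations, and should be checked against them:

# Hypothetical call: run file, ranges and property names are illustrative only.
CreateCalibrationWorkspace(InputFiles='IRS26173.raw',
                           DetectorRange=[3, 53],
                           PeakRange=[62500, 65000],
                           BackgroundRange=[59000, 61500],
                           OutputWorkspace='calib')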
Example #3
 def validateParams(self):
     '''Check parameters are positive'''
     height = self.getParameterValue('height')
     tau = self.getParameterValue('tau')
     beta = self.getParameterValue('beta')
     for name,value in {'height':height, 'tau':tau, 'beta':beta}.items():
         if value <=0:
             message = 'Parameter {} in StretchedExpFT must be positive. Got {} instead'.format(name, str(value))
             logger.error(message)
             #raise ValueError(message)
             return None
     return {'height':height, 'tau':tau, 'beta':beta}
Example #4
 def validateParams(self):
     '''Check parameters within expected range'''
     intensity = self.getParameterValue('Intensity')
     if intensity <=0:
         message = 'Parameter Intensity in DSFinterp1DFit must be positive. Got {0} instead'.format(intensity)
         logger.error(message)
         return None
     f = self.getParameterValue('TargetParameter')
     if f < self._fmin or f > self._fmax:
         message = 'TargetParameter {0} is out of bounds [{1}, {2}]. Applying penalty...'.format(f, self._fmin, self._fmax)
         logger.error(message)
         return None
     return {'Intensity':intensity, 'TargetParameter':f}
Example #5
 def validateParams(self):
     """Check parameters are positive"""
     height = self.getParameterValue('height')
     tau = self.getParameterValue('tau')
     beta = self.getParameterValue('beta')
     origin = self.getParameterValue('Origin')
     for name, value in {'height': height, 'tau': tau, 'beta': beta}.items():
         if value <= 0:
             message = 'Parameter {} in StretchedExpFT must be positive. Got {} instead'.format(name, str(value))
             logger.error(message)
             # raise ValueError(message)
             return None
     return {'height': height, 'tau': tau, 'beta': beta, 'Origin': origin}
Example #6
 def validateParams(self):
     """Check parameters are positive"""
     height = self.getParameterValue("height")
     tau = self.getParameterValue("tau")
     beta = self.getParameterValue("beta")
     origin = self.getParameterValue("Origin")
     for name, value in {"height": height, "tau": tau, "beta": beta}.items():
         if value <= 0:
             message = "Parameter {} in StretchedExpFT must be positive. Got {} instead".format(name, str(value))
             logger.error(message)
             # raise ValueError(message)
             return None
     return {"height": height, "tau": tau, "beta": beta, "Origin": origin}
Example #7
 def validateParams(self):
     '''Check parameters within expected range'''
     intensity = self.getParameterValue('Intensity')
     if intensity <= 0:
         message = 'Parameter Intensity in DSFinterp1DFit must be positive. Got {0} instead'.format(
             intensity)
         logger.error(message)
         return None
     f = self.getParameterValue('TargetParameter')
     if f < self._fmin or f > self._fmax:
         message = 'TargetParameter {0} is out of bounds [{1}, {2}]. Applying penalty...'.format(
             f, self._fmin, self._fmax)
         logger.error(message)
         return None
     return {'Intensity': intensity, 'TargetParameter': f}
Example #8
def ReadMap(path):
    asc = loadFile(path)

    lasc = len(asc)
    logger.information('Map file : ' + path + ' ; spectra = ' + str(lasc - 1))
    val = ExtractInt(asc[0])
    numb = val[0]
    if numb != (lasc - 1):
        error = 'Number of lines not equal to number of spectra'
        logger.error(error)
        sys.exit(error)
    map = []
    for n in range(1, lasc):
        val = ExtractInt(asc[n])
        map.append(val[1])
    return map
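A minimal usage sketch for ReadMap; the file path is a hypothetical placeholder, and loadFile/ExtractInt are assumed to come from the same module:

# Hypothetical call: returns one integer per spectrum line in the map file
# (the second value parsed from each line).
grouping = ReadMap('/data/sample_grouping.map')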
Example #9
def resolution(files, iconOpt, rebinParam, bground,
        instrument, analyser, reflection,
        Res=True, factor=None, Plot=False, Verbose=False, Save=False):
    reducer = inelastic_indirect_reducer.IndirectReducer()
    reducer.set_instrument_name(instrument)
    reducer.set_detector_range(iconOpt['first']-1,iconOpt['last']-1)
    for file in files:
        reducer.append_data_file(file)
    parfile = instrument +"_"+ analyser +"_"+ reflection +"_Parameters.xml"
    reducer.set_parameter_file(parfile)
    reducer.set_grouping_policy('All')
    reducer.set_sum_files(True)

    try:
        reducer.reduce()
    except Exception as e:
        logger.error(str(e))
        return
Example #10
 def function1D(self, xvals):
     ''' Fit using the interpolated structure factor '''
     p=self.validateParams()
     if not p:
         return numpy.zeros(len(xvals), dtype=float) # return zeros if parameters not valid
     # The first time the function is called requires initialization of the interpolator
     if self._channelgroup is None:
         # Check consistency of the input
         # check InputWorkspaces have at least the workspace index
         for w in self._InputWorkspaces:
             if mtd[w].getNumberHistograms() <= self._WorkspaceIndex:
                 message = 'Number of histograms in Workspace {0} does not allow for workspace index {1}'.format(w,self._WorkspaceIndex)
                 logger.error(message)
                 raise IndexError(message)
         # check number of input workspaces and parameters is the same
         if len(self._ParameterValues) != len(self._InputWorkspaces):
             message = 'Number of InputWorkspaces and ParameterValues should be the same.'+\
                       ' Found {0} and {1}, respectively'.format(len(self._InputWorkspaces), len(self._ParameterValues))
             logger.error(message)
             raise ValueError(message)
         # check the regression type is valid
         if self._RegressionType not in self._RegressionTypes:
             message = 'Regression type {0} not implemented. choose one of {1}'.format(self._RegressionType,
                                                                                       ', '.join(self._RegressionTypes))
             logger.error(message)
             raise NotImplementedError(message)
         # check the regression window is appropriate for the regression type selected
         if self._RegressionWindow < self._minWindow[self._RegressionType]:
             message = 'RegressionWindow must be equal or bigger than '+\
                       '{0} for regression type {1}'.format(self._minWindow[self._RegressionType], self._RegressionType)
             logger.error(message)
             raise ValueError(message)
         # Initialize the energies of the channels with the first of the input workspaces
         self._xvalues = numpy.copy( mtd[ self._InputWorkspaces[0] ].dataX(self._WorkspaceIndex) )
         if len(self._xvalues) == 1+ len( mtd[ self._InputWorkspaces[0] ].dataY(self._WorkspaceIndex) ):
             self._xvalues = (self._xvalues[1:]+self._xvalues[:-1])/2.0  # Deal with histogram data
         # Initialize the channel group
         nf = len(self._ParameterValues)
         # Load the InputWorkspaces into a group of dynamic structure factors
         from dsfinterp.dsf import Dsf
         from dsfinterp.dsfgroup import DsfGroup
         dsfgroup = DsfGroup()
         for idsf in range(nf):
             dsf = Dsf()
             dsf.SetIntensities( mtd[ self._InputWorkspaces[idsf] ].dataY(self._WorkspaceIndex) )
             dsf.errors = None # do not incorporate error data
             if self._LoadErrors:
                 dsf.SetErrors(mtd[ self._InputWorkspaces[idsf] ].dataE(self._WorkspaceIndex))
             dsf.SetFvalue( self._ParameterValues[idsf] )
             dsfgroup.InsertDsf(dsf)
         # Create the interpolator
         from dsfinterp.channelgroup import ChannelGroup
         self._channelgroup = ChannelGroup()
         self._channelgroup.InitFromDsfGroup(dsfgroup)
         if self._LocalRegression:
             self._channelgroup.InitializeInterpolator(running_regr_type=self._RegressionType, windowlength=self._RegressionWindow)
         else:
             self._channelgroup.InitializeInterpolator(windowlength=0)
     # channel group has been initialized, so evaluate the interpolator
     dsf = self._channelgroup(p['TargetParameter'])
     # Linear interpolation between the energies of the channels and the xvalues we require
     # NOTE: interpolator evaluates to zero for any of the xvals outside of the domain defined by self._xvalues
     intensities_interpolator = scipy.interpolate.interp1d(self._xvalues, p['Intensity']*dsf.intensities, kind='linear')
     return intensities_interpolator(xvals)  # can we pass by reference?
Example #11
    def PyExec(self):
        from mantid import config, logger
        from IndirectCommon import StartTime, EndTime
        import inelastic_indirect_reducer

        self._setup()

        StartTime('InelasticIndirectReduction')

        # Setup reducer
        reducer = inelastic_indirect_reducer.IndirectReducer()

        reducer.set_rename(True)

        reducer.set_instrument_name(self._instrument)
        reducer.set_parameter_file(self._param_file)
        try:
            reducer.set_output_path(config["defaultsave.directory"])
        except RuntimeError:
            pass # Use default

        for data_file in self._data_files:
            reducer.append_data_file(data_file)

        reducer.set_sum_files(self._sum_files)

        reducer.set_detector_range(int(self._detector_range[0]) - 1, int(self._detector_range[1]) - 1)

        self._use_calib_ws = self._calib_ws_name != ''
        if self._use_calib_ws:
            logger.information('Using calibration workspace: %s' % self._calib_ws_name)
            reducer.set_calibration_workspace(self._calib_ws_name)

        if len(self._background_range) == 2:
            logger.debug('Using background range: ' + str(self._background_range))
            reducer.set_background(float(self._background_range[0]), float(self._background_range[1]))

        # TODO: There should be a better way to do this
        self._use_detailed_balance = self._detailed_balance != -1.0
        if self._use_detailed_balance:
            logger.debug('Using detailed balance: ' + str(self._detailed_balance))
            reducer.set_detailed_balance(self._detailed_balance)

        if self._rebin_string != '':
            logger.debug('Using rebin string: ' + self._rebin_string)
            reducer.set_rebin_string(self._rebin_string)

        self._use_scale_factor = self._scale_factor != 1.0
        if self._use_scale_factor:
            logger.debug('Using scale factor: ' + str(self._scale_factor))
            reducer.set_scale_factor(self._scale_factor)

        if self._map_file != '':
            logger.debug('Using mapping file: ' + str(self._map_file))
            reducer.set_grouping_policy(self._map_file)

        reducer.set_fold_multiple_frames(self.getProperty('Fold').value)
        reducer.set_save_to_cm_1(self.getProperty('SaveCM1').value)
        reducer.set_save_formats(self._save_formats)

        # Do reduction and get result workspaces
        reducer.reduce()
        ws_list = reducer.get_result_workspaces()

        if len(ws_list) < 1:
            logger.error('Failed to complete reduction')
            return

        self._plot_ws = ws_list[0]

        # Add sample logs to output workspace(s)
        for workspace in ws_list:
            self._add_ws_logs(workspace)

        # Group output workspaces
        GroupWorkspaces(InputWorkspaces=ws_list, OutputWorkspace=self._out_ws_group)
        self.setProperty('OutputWorkspace', self._out_ws_group)

        # Do plotting
        if self._plot_type != 'none':
            self._plot()

        EndTime('InelasticIndirectReduction')
Example #12
def unsupported_message():
    logger.error(
        'F2Py functionality not currently available on your platform.')
    sys.exit()
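A minimal guard sketch showing how a fallback like this is typically invoked; the have_f2py flag is a hypothetical stand-in for whatever platform check the surrounding module performs:

# Hypothetical guard: log the error and exit before any F2Py-backed routine
# is called on an unsupported platform.
if not have_f2py:
    unsupported_message()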
Example #13
    if ystart==ystop or xstart==xstop:
        X,Y=np.meshgrid(xvals,yvals)
        plt.pcolormesh(X,Y,arrayToPlot,shading='gouraud')
    else:
        arrayToPlot_trim = arrayToPlot[xstart:xstop, ystart:ystop]
        Y,X=np.meshgrid(yvals[ystart:ystop],xvals[xstart:xstop])
        normmasked=np.ma.masked_where(arrayToPlot_trim==0,arrayToPlot_trim)
        plt.pcolormesh(X,Y,normmasked,shading='gouraud')
    plt.xlabel(dim0.getName())
    plt.ylabel(dim1.getName())
    plt.title(dim2.getName()+' integrated from '+"{0:.3f}".format(d2min)+' to '+"{0:.3f}".format(d2max)) 

if __name__ == "__main__":
    # check number of arguments
    if (len(sys.argv) != 3): 
        logger.error("autoreduction code requires a filename and an output directory")
        sys.exit()
    if not(os.path.isfile(sys.argv[1])):
        logger.error("data file "+sys.argv[1]+ " not found")
        sys.exit()    
    else:
        filename = sys.argv[1]
        outdir = sys.argv[2]
    nexus_file=sys.argv[1]
    output_directory=sys.argv[2]
    output_file=os.path.split(nexus_file)[-1].replace('.nxs.h5','')

    # load file
    raw=Load(nexus_file)
    
    # Do the cross-correlation and save the file
Example #14
def unsupported_message():
    logger.error(UNSUPPORTED_PLATFORM_MESSAGE)
    sys.exit()
Example #15
def unsupported_message():
    logger.error('F2Py functionality not currently available on your platform.')
    sys.exit()
Example #16
    else:
        arrayToPlot_trim = arrayToPlot[xstart:xstop, ystart:ystop]
        Y, X = np.meshgrid(yvals[ystart:ystop], xvals[xstart:xstop])
        normmasked = np.ma.masked_where(arrayToPlot_trim == 0,
                                        arrayToPlot_trim)
        plt.pcolormesh(X, Y, normmasked, shading='gouraud')
    plt.xlabel(dim0.getName())
    plt.ylabel(dim1.getName())
    plt.title(dim2.getName() + ' integrated from ' + "{:.3f}".format(d2min) +
              ' to ' + "{:.3f}".format(d2max))


if __name__ == "__main__":
    # check number of arguments
    if (len(sys.argv) != 3):
        logger.error(
            "autoreduction code requires a filename and an output directory")
        sys.exit()
    if not (os.path.isfile(sys.argv[1])):
        logger.error("data file " + sys.argv[1] + " not found")
        sys.exit()
    else:
        filename = sys.argv[1]
        outdir = sys.argv[2]
    nexus_file = sys.argv[1]
    output_directory = sys.argv[2]
    output_file = os.path.split(nexus_file)[-1].replace('.nxs.h5', '')

    # load file
    raw = Load(nexus_file)

    # Do the cross-correlation and save the file.
Example #17
 def function1D(self, xvals):
     ''' Fit using the interpolated structure factor '''
     p = self.validateParams()
     if not p:
         return numpy.zeros(
             len(xvals),
             dtype=float)  # return zeros if parameters not valid
     # The first time the function is called requires initialization of the interpolator
     if self._channelgroup is None:
         # Check consistency of the input
         # check InputWorkspaces have at least the workspace index
         for w in self._InputWorkspaces:
             if mtd[w].getNumberHistograms() <= self._WorkspaceIndex:
                 message = 'Number of histograms in Workspace {0} does not allow for workspace index {1}'.format(
                     w, self._WorkspaceIndex)
                 logger.error(message)
                 raise IndexError(message)
         # check number of input workspaces and parameters is the same
         if len(self._ParameterValues) != len(self._InputWorkspaces):
             message = 'Number of InputWorkspaces and ParameterValues should be the same.'+\
                       ' Found {0} and {1}, respectively'.format(len(self._InputWorkspaces), len(self._ParameterValues))
             logger.error(message)
             raise ValueError(message)
         # check the regression type is valid
         if self._RegressionType not in self._RegressionTypes:
             message = 'Regression type {0} not implemented. choose one of {1}'.format(
                 self._RegressionType, ', '.join(self._RegressionTypes))
             logger.error(message)
             raise NotImplementedError(message)
         # check the regression window is appropriate for the regression type selected
         if self._RegressionWindow < self._minWindow[self._RegressionType]:
             message = 'RegressionWindow must be equal or bigger than '+\
                       '{0} for regression type {1}'.format(self._minWindow[self._RegressionType], self._RegressionType)
             logger.error(message)
             raise ValueError(message)
         # Initialize the energies of the channels with the first of the input workspaces
         self._xvalues = numpy.copy(mtd[self._InputWorkspaces[0]].dataX(
             self._WorkspaceIndex))
         if len(self._xvalues) == 1 + len(
                 mtd[self._InputWorkspaces[0]].dataY(self._WorkspaceIndex)):
             self._xvalues = (self._xvalues[1:] + self._xvalues[:-1]
                              ) / 2.0  # Deal with histogram data
         # Initialize the channel group
         nf = len(self._ParameterValues)
         # Load the InputWorkspaces into a group of dynamic structure factors
         from dsfinterp.dsf import Dsf
         from dsfinterp.dsfgroup import DsfGroup
         dsfgroup = DsfGroup()
         for idsf in range(nf):
             dsf = Dsf()
             dsf.SetIntensities(mtd[self._InputWorkspaces[idsf]].dataY(
                 self._WorkspaceIndex))
             dsf.errors = None  # do not incorporate error data
             if self._LoadErrors:
                 dsf.SetErrors(mtd[self._InputWorkspaces[idsf]].dataE(
                     self._WorkspaceIndex))
             dsf.SetFvalue(self._ParameterValues[idsf])
             dsfgroup.InsertDsf(dsf)
         # Create the interpolator
         from dsfinterp.channelgroup import ChannelGroup
         self._channelgroup = ChannelGroup()
         self._channelgroup.InitFromDsfGroup(dsfgroup)
         if self._LocalRegression:
             self._channelgroup.InitializeInterpolator(
                 running_regr_type=self._RegressionType,
                 windowlength=self._RegressionWindow)
         else:
             self._channelgroup.InitializeInterpolator(windowlength=0)
     # channel group has been initialized, so evaluate the interpolator
     dsf = self._channelgroup(p['TargetParameter'])
     # Linear interpolation between the energies of the channels and the xvalues we require
     # NOTE: interpolator evaluates to zero for any of the xvals outside of the domain defined by self._xvalues
     intensities_interpolator = scipy.interpolate.interp1d(self._xvalues,
                                                           p['Intensity'] *
                                                           dsf.intensities,
                                                           kind='linear')
     return intensities_interpolator(xvals)  # can we pass by reference?