def create_param_vs_Y(som, param, param_func, param_axis, **kwargs):
    """
    This function takes a group of single spectra with any given axes
    (wavelength, energy etc.). The function can optionally rebin those axes
    to a given axis. It then creates a 2D spectrum by using a parameter, a
    parameter function and a given axis for the lookup locations and places
    each original spectrum in the found location.

    @param som: The input object with arbitrary (but same) axis spectra
    @type som: C{SOM.SOM}

    @param param: The parameter that will be used for creating the lookups.
    @type param: C{string}

    @param param_func: The function that will convert the parameter into the
                       values for lookups.
    @type param_func: C{string}

    @param param_axis: The axis that will be searched for the lookup values.
    @type param_axis: C{nessi_list.NessiList}

    @param kwargs: A list of keyword arguments that the function accepts:

    @keyword rebin_axis: An axis to rebin the given spectra to.
    @type rebin_axis: C{nessi_list.NessiList}

    @keyword data_type: The name of the data type which can be either
                        I{histogram}, I{density} or I{coordinate}. The
                        default value will be I{histogram}.
    @type data_type: C{string}

    @keyword pixnorm: A flag to track the number of pixels that contribute
                      to a bin and then normalize the bin by that number.
    @type pixnorm: C{boolean}

    @keyword prnorm: A parameter to track and determine a range (max - min)
                     for each bin of the requested parameter axis. The range
                     will then be divided into the final summed spectrum for
                     the given bin.
    @type prnorm: C{string}

    @keyword binnorm: A flag that turns on the scaling of each stripe of the
                      y-axis by the individual bin widths from the y-axis.
    @type binnorm: C{boolean}

    @keyword so_id: The identifier represents a number, string, tuple or
                    other object that describes the resulting C{SO}.
    @type so_id: C{int}, C{string}, C{tuple}, C{pixel ID}

    @keyword y_label: The dependent axis label
    @type y_label: C{string}

    @keyword y_units: The dependent axis units
    @type y_units: C{string}

    @keyword x_labels: The two independent axis labels
    @type x_labels: C{list} of C{string}s

    @keyword x_units: The two independent axis units
    @type x_units: C{list} of C{string}s

    @return: A two dimensional spectrum with the parameter as the x-axis and
             the given spectra axes as the y-axis.
    @rtype: C{SOM.SOM}
    """
    import array_manip
    import dr_lib
    import hlr_utils
    import nessi_list
    import SOM
    import utils

    # Check for rebinning axis
    try:
        rebin_axis = kwargs["rebin_axis"]
    except KeyError:
        rebin_axis = None

    # Check for pixnorm flag
    try:
        pixnorm = kwargs["pixnorm"]
    except KeyError:
        pixnorm = False

    # Check for binnorm flag
    try:
        binnorm = kwargs["binnorm"]
    except KeyError:
        binnorm = False

    # Check for prnorm flag
    try:
        prpar = kwargs["prnorm"]
        prnorm = True
    except KeyError:
        prnorm = False

    # Check for data_type keyword argument. An offset will be set to 1 for
    # the histogram type and 0 for either density or coordinate
    try:
        data_type = kwargs["data_type"]
        if data_type.lower() == "histogram":
            offset = 1
        elif data_type.lower() == "density" or \
                 data_type.lower() == "coordinate":
            offset = 0
        else:
            raise RuntimeError("Do not understand data type given: %s" % \
                               data_type)
    # Default is offset for histogram
    except KeyError:
        offset = 1

    # Setup some variables
    dim = 2
    N_tot = 1

    # Create 2D spectrum object
    so_dim = SOM.SO(dim)

    # Set the axis locations
    param_axis_loc = 0
    arb_axis_loc = 1

    # Rebin original data to rebin_axis if necessary
    if rebin_axis is not None:
        (som1, som2) = dr_lib.rebin_axis_1D_frac(som, rebin_axis)
        len_arb_axis = len(rebin_axis) - offset
        so_dim.axis[arb_axis_loc].val = rebin_axis
    else:
        som1 = som
        len_arb_axis = len(som[0].axis[0].val) - offset
        so_dim.axis[arb_axis_loc].val = som[0].axis[0].val

    del som

    # Get parameter axis information
    len_param_axis = len(param_axis) - offset
    so_dim.axis[param_axis_loc].val = param_axis

    if pixnorm:
        pixarr = nessi_list.NessiList(len_param_axis)

    if prnorm:
        prarr = []
        for i in xrange(len_param_axis):
            prarr.append(nessi_list.NessiList())
        # Get the parameters for all the spectra
        ppfunc = hlr_utils.__getattribute__("param_array")
        prarr_lookup = ppfunc(som1, prpar)

    # Get the parameter lookup array
    pfunc = hlr_utils.__getattribute__(param_func)
    lookup_array = pfunc(som1, param)

    # Create y and var_y lists from total 2D size
    N_tot = len_param_axis * len_arb_axis
    so_dim.y = nessi_list.NessiList(N_tot)
    so_dim.var_y = nessi_list.NessiList(N_tot)

    if rebin_axis is not None:
        frac_area = nessi_list.NessiList(N_tot)
        frac_area_err2 = nessi_list.NessiList(N_tot)

    # Loop through data and create 2D spectrum
    len_som = hlr_utils.get_length(som1)
    for i in xrange(len_som):
        val = hlr_utils.get_value(som1, i, "SOM", "y")
        err2 = hlr_utils.get_err2(som1, i, "SOM", "y")

        bin_index = utils.bisect_helper(param_axis, lookup_array[i])
        start = bin_index * len_arb_axis

        if pixnorm:
            pixarr[bin_index] += 1

        if prnorm:
            prarr[bin_index].append(prarr_lookup[i])

        (so_dim.y, so_dim.var_y) = array_manip.add_ncerr(so_dim.y,
                                                         so_dim.var_y,
                                                         val, err2,
                                                         a_start=start)
        if rebin_axis is not None:
            val1 = hlr_utils.get_value(som2, i, "SOM", "y")
            err1_2 = hlr_utils.get_err2(som2, i, "SOM", "y")
            (frac_area, frac_area_err2) = array_manip.add_ncerr(
                frac_area, frac_area_err2, val1, err1_2, a_start=start)

    if rebin_axis is not None:
        (so_dim.y, so_dim.var_y) = array_manip.div_ncerr(so_dim.y,
                                                         so_dim.var_y,
                                                         frac_area,
                                                         frac_area_err2)

    # If parameter range normalization enabled, find the range for the
    # parameter
    if prnorm:
        import math
        prrange = nessi_list.NessiList(len_param_axis)
        for i in xrange(len(prrange)):
            try:
                max_val = max(prarr[i])
            except ValueError:
                max_val = 0.0
            try:
                min_val = min(prarr[i])
            except ValueError:
                min_val = 0.0
            prrange[i] = math.fabs(max_val - min_val)

    # If pixel normalization tracking enabled, divide slices by pixel counts
    if pixnorm or prnorm:
        tmp_y = nessi_list.NessiList(N_tot)
        tmp_var_y = nessi_list.NessiList(N_tot)

        for i in range(len_param_axis):
            start = i * len_arb_axis
            end = (i + 1) * len_arb_axis

            slice_y = so_dim.y[start:end]
            slice_var_y = so_dim.var_y[start:end]

            divconst = 1.0

            if pixnorm:
                divconst *= pixarr[i]

            # Scale division constant if parameter range normalization
            # enabled
            if prnorm:
                divconst *= prrange[i]

            (dslice_y, dslice_var_y) = array_manip.div_ncerr(slice_y,
                                                             slice_var_y,
                                                             divconst, 0.0)

            (tmp_y, tmp_var_y) = array_manip.add_ncerr(tmp_y, tmp_var_y,
                                                       dslice_y,
                                                       dslice_var_y,
                                                       a_start=start)

        so_dim.y = tmp_y
        so_dim.var_y = tmp_var_y

    if binnorm:
        tmp_y = nessi_list.NessiList(N_tot)
        tmp_var_y = nessi_list.NessiList(N_tot)

        if rebin_axis is not None:
            bin_const = utils.calc_bin_widths(rebin_axis)
        else:
            bin_const = utils.calc_bin_widths(som1[0].axis[1].val)

        for i in range(len_param_axis):
            start = i * len_arb_axis
            end = (i + 1) * len_arb_axis

            slice_y = so_dim.y[start:end]
            slice_var_y = so_dim.var_y[start:end]

            (dslice_y, dslice_var_y) = array_manip.mult_ncerr(slice_y,
                                                              slice_var_y,
                                                              bin_const[0],
                                                              bin_const[1])

            (tmp_y, tmp_var_y) = array_manip.add_ncerr(tmp_y, tmp_var_y,
                                                       dslice_y,
                                                       dslice_var_y,
                                                       a_start=start)

        so_dim.y = tmp_y
        so_dim.var_y = tmp_var_y

    # Create final 2D spectrum object container
    comb_som = SOM.SOM()
    comb_som.copyAttributes(som1)

    del som1

    # Check for so_id keyword argument
    try:
        so_dim.id = kwargs["so_id"]
    except KeyError:
        so_dim.id = 0

    # Check for y_label keyword argument
    try:
        comb_som.setYLabel(kwargs["y_label"])
    except KeyError:
        comb_som.setYLabel("Counts")

    # Check for y_units keyword argument
    try:
        comb_som.setYUnits(kwargs["y_units"])
    except KeyError:
        comb_som.setYUnits("Counts / Arb")

    # Check for x_labels keyword argument
    try:
        comb_som.setAllAxisLabels(kwargs["x_labels"])
    except KeyError:
        comb_som.setAllAxisLabels(["Parameter", "Arbitrary"])

    # Check for x_units keyword argument
    try:
        comb_som.setAllAxisUnits(kwargs["x_units"])
    except KeyError:
        comb_som.setAllAxisUnits(["Arb", "Arb"])

    comb_som.append(so_dim)

    del so_dim

    return comb_som
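# ----------------------------------------------------------------------------
# Illustrative sketch (not part of dr_lib): the core pattern in
# create_param_vs_Y above is a lookup-and-accumulate into a flattened 2D
# array -- each spectrum's parameter value is bisected into param_axis to
# pick a stripe, and the spectrum is summed into that stripe. The helper
# below shows that pattern with plain Python lists; all names here are
# hypothetical and stand in for the SOM/NessiList machinery.
# ----------------------------------------------------------------------------
def _sketch_param_vs_y(spectra, lookups, param_axis):
    """Accumulate 1D spectra into a flattened (param x arb) 2D array."""
    from bisect import bisect_right

    len_param_axis = len(param_axis) - 1    # histogram axis: N+1 boundaries
    len_arb_axis = len(spectra[0])
    flat = [0.0] * (len_param_axis * len_arb_axis)

    for spectrum, lookup in zip(spectra, lookups):
        # Clamp the bin index the way a bisect helper on a histogram axis
        # would (values beyond the last boundary land in the last bin)
        bin_index = min(bisect_right(param_axis, lookup) - 1,
                        len_param_axis - 1)
        start = bin_index * len_arb_axis
        for j, counts in enumerate(spectrum):
            flat[start + j] += counts

    return flat

# Example: two 3-bin spectra placed by a per-spectrum parameter value
# print _sketch_param_vs_y([[1, 2, 3], [4, 5, 6]], [0.1, 0.7],
#                          [0.0, 0.5, 1.0])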
def tof_to_wavelength_lin_time_zero(obj, **kwargs):
    """
    This function converts a primary axis of a C{SOM} or C{SO} from
    time-of-flight to wavelength incorporating a linear time zero, which is
    described as a linear function of the wavelength. The time-of-flight
    axis for a C{SOM} must be in units of I{microseconds}. The primary axis
    of a C{SO} is assumed to be in units of I{microseconds}. A C{tuple} of
    C{(tof, tof_err2)} (assumed to be in units of I{microseconds}) can be
    converted to C{(wavelength, wavelength_err2)}.

    @param obj: Object to be converted
    @type obj: C{SOM.SOM}, C{SOM.SO} or C{tuple}

    @param kwargs: A list of keyword arguments that the function accepts:

    @keyword pathlength: The pathlength and its associated error^2
    @type pathlength: C{tuple} or C{list} of C{tuple}s

    @keyword time_zero_slope: The time zero slope and its associated error^2
    @type time_zero_slope: C{tuple}

    @keyword time_zero_offset: The time zero offset and its associated
                               error^2
    @type time_zero_offset: C{tuple}

    @keyword inst_param: The type of parameter requested from an associated
                         instrument. For this function the acceptable
                         parameters are I{primary}, I{secondary} and
                         I{total}. Default is I{primary}.
    @type inst_param: C{string}

    @keyword lojac: A flag that allows one to turn off the calculation of
                    the linear-order Jacobian. The default action is I{True}
                    for histogram data.
    @type lojac: C{boolean}

    @keyword units: The expected units for this function. The default for
                    this function is I{microseconds}.
    @type units: C{string}

    @keyword cut_val: Specify a wavelength to cut the spectra at.
    @type cut_val: C{float}

    @keyword cut_less: A flag that specifies cutting the spectra less than
                       C{cut_val}. The default is C{True}.
    @type cut_less: C{boolean}

    @return: Object with a primary axis in time-of-flight converted to
             wavelength
    @rtype: C{SOM.SOM}, C{SOM.SO} or C{tuple}

    @raise TypeError: The incoming object is not a type the function
                      recognizes

    @raise RuntimeError: The C{SOM} x-axis units are not I{microseconds}

    @raise RuntimeError: A C{SOM} does not contain an instrument and no
                         pathlength was provided

    @raise RuntimeError: No C{SOM} is provided and no pathlength given
    """
    # import the helper functions
    import hlr_utils

    # set up for working through data
    (result, res_descr) = hlr_utils.empty_result(obj)
    o_descr = hlr_utils.get_descr(obj)

    # Setup keyword arguments
    try:
        inst_param = kwargs["inst_param"]
    except KeyError:
        inst_param = "primary"

    try:
        pathlength = kwargs["pathlength"]
    except KeyError:
        pathlength = None

    try:
        time_zero_slope = kwargs["time_zero_slope"]
    except KeyError:
        time_zero_slope = None

    # Current constants for Time Zero Slope
    TIME_ZERO_SLOPE = (float(0.0), float(0.0))

    try:
        time_zero_offset = kwargs["time_zero_offset"]
    except KeyError:
        time_zero_offset = None

    # Current constants for Time Zero Offset
    TIME_ZERO_OFFSET = (float(0.0), float(0.0))

    try:
        lojac = kwargs["lojac"]
    except KeyError:
        lojac = hlr_utils.check_lojac(obj)

    try:
        units = kwargs["units"]
    except KeyError:
        units = "microseconds"

    try:
        cut_val = kwargs["cut_val"]
    except KeyError:
        cut_val = None

    try:
        cut_less = kwargs["cut_less"]
    except KeyError:
        cut_less = True

    # Primary axis for transformation. If a SO is passed, the function will
    # assume the axis for transformation is at the 0 position
    if o_descr == "SOM":
        axis = hlr_utils.one_d_units(obj, units)
    else:
        axis = 0

    result = hlr_utils.copy_som_attr(result, res_descr, obj, o_descr)
    if res_descr == "SOM":
        result = hlr_utils.force_units(result, "Angstroms", axis)
        result.setAxisLabel(axis, "wavelength")
        result.setYUnits("Counts/A")
        result.setYLabel("Intensity")
    else:
        pass

    if pathlength is not None:
        p_descr = hlr_utils.get_descr(pathlength)
    else:
        if o_descr == "SOM":
            try:
                obj.attr_list.instrument.get_primary()
                inst = obj.attr_list.instrument
            except RuntimeError:
                raise RuntimeError("A detector was not provided")
        else:
            raise RuntimeError("If no SOM is provided, then pathlength "\
                               +"information must be provided")

    if time_zero_slope is not None:
        t_0_slope_descr = hlr_utils.get_descr(time_zero_slope)
    else:
        if o_descr == "SOM":
            try:
                t_0_slope = obj.attr_list["Time_zero_slope"][0]
                t_0_slope_err2 = obj.attr_list["Time_zero_slope"][1]
            except KeyError:
                t_0_slope = TIME_ZERO_SLOPE[0]
                t_0_slope_err2 = TIME_ZERO_SLOPE[1]
        else:
            t_0_slope = TIME_ZERO_SLOPE[0]
            t_0_slope_err2 = TIME_ZERO_SLOPE[1]

    if time_zero_offset is not None:
        t_0_offset_descr = hlr_utils.get_descr(time_zero_offset)
    else:
        if o_descr == "SOM":
            try:
                t_0_offset = obj.attr_list["Time_zero_offset"][0]
                t_0_offset_err2 = obj.attr_list["Time_zero_offset"][1]
            except KeyError:
                t_0_offset = TIME_ZERO_OFFSET[0]
                t_0_offset_err2 = TIME_ZERO_OFFSET[1]
        else:
            t_0_offset = TIME_ZERO_OFFSET[0]
            t_0_offset_err2 = TIME_ZERO_OFFSET[1]

    # iterate through the values
    import axis_manip
    if lojac or cut_val is not None:
        import utils

    for i in xrange(hlr_utils.get_length(obj)):
        val = hlr_utils.get_value(obj, i, o_descr, "x", axis)
        err2 = hlr_utils.get_err2(obj, i, o_descr, "x", axis)

        map_so = hlr_utils.get_map_so(obj, None, i)

        if pathlength is None:
            (pl, pl_err2) = hlr_utils.get_parameter(inst_param, map_so, inst)
        else:
            pl = hlr_utils.get_value(pathlength, i, p_descr)
            pl_err2 = hlr_utils.get_err2(pathlength, i, p_descr)

        if time_zero_slope is not None:
            t_0_slope = hlr_utils.get_value(time_zero_slope, i,
                                            t_0_slope_descr)
            t_0_slope_err2 = hlr_utils.get_err2(time_zero_slope, i,
                                                t_0_slope_descr)
        else:
            pass

        if time_zero_offset is not None:
            t_0_offset = hlr_utils.get_value(time_zero_offset, i,
                                             t_0_offset_descr)
            t_0_offset_err2 = hlr_utils.get_err2(time_zero_offset, i,
                                                 t_0_offset_descr)
        else:
            pass

        value = axis_manip.tof_to_wavelength_lin_time_zero(val, err2,
                                                           pl, pl_err2,
                                                           t_0_slope,
                                                           t_0_slope_err2,
                                                           t_0_offset,
                                                           t_0_offset_err2)

        if cut_val is not None:
            index = utils.bisect_helper(value[0], cut_val)
            if cut_less:
                # Need to cut at this index, so increment by one
                index += 1
                value[0].__delslice__(0, index)
                value[1].__delslice__(0, index)
                map_so.y.__delslice__(0, index)
                map_so.var_y.__delslice__(0, index)
                if lojac:
                    val.__delslice__(0, index)
                    err2.__delslice__(0, index)
            else:
                len_data = len(value[0])
                # All axis arrays need starting index adjusted by one since
                # they always carry one more bin than the data
                value[0].__delslice__(index + 1, len_data)
                value[1].__delslice__(index + 1, len_data)
                map_so.y.__delslice__(index, len_data)
                map_so.var_y.__delslice__(index, len_data)
                if lojac:
                    val.__delslice__(index + 1, len_data)
                    err2.__delslice__(index + 1, len_data)

        if lojac:
            counts = utils.linear_order_jacobian(val, value[0],
                                                 map_so.y, map_so.var_y)
            hlr_utils.result_insert(result, res_descr, counts, map_so,
                                    "all", axis, [value[0]])
        else:
            hlr_utils.result_insert(result, res_descr, value, map_so,
                                    "x", axis)

    return result
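# ----------------------------------------------------------------------------
# Illustrative sketch (not part of axis_manip): the conversion above is
# assumed to invert the usual linear-time-zero relation
#     TOF = (m_n * L / h) * lambda + t0_slope * lambda + t0_offset
# so that
#     lambda = (TOF - t0_offset) / (m_n * L / h + t0_slope).
# The helper below shows that arithmetic for a single TOF value, without the
# error propagation that axis_manip carries along; the names are
# hypothetical.
# ----------------------------------------------------------------------------
def _sketch_tof_to_wavelength_lin_time_zero(tof, pathlength,
                                            t_0_slope=0.0, t_0_offset=0.0):
    """Convert one TOF value (microseconds) to wavelength (Angstroms)."""
    # m_n / h expressed in microseconds per (meter * Angstrom); ~252.778 is
    # the standard neutron constant at this precision.
    MNEUT_OVER_H = 252.778
    return (tof - t_0_offset) / (MNEUT_OVER_H * pathlength + t_0_slope)

# Example: a 20 m flight path and a 10000 microsecond arrival time give a
# wavelength just under 2 Angstroms.
# print _sketch_tof_to_wavelength_lin_time_zero(10000.0, 20.0)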
def process_reflp_data(datalist, conf, roi_file, bkg_roi_file=None,
                       no_bkg=False, **kwargs):
    """
    This function combines Steps 1 through 3 in section 2.4.6.1 of the data
    reduction process for Reduction from TOF to lambda_T as specified by the
    document at
    U{http://neutrons.ornl.gov/asg/projects/SCL/reqspec/DR_Lib_RS.doc}. The
    function takes a list of file names, a L{hlr_utils.Configure} object, a
    region-of-interest (ROI) file for the normalization dataset, a background
    region-of-interest (ROI) file and an optional flag about background
    subtraction, and processes the data accordingly.

    @param datalist: The filenames of the data to be processed
    @type datalist: C{list} of C{string}s

    @param conf: Object that contains the current setup of the driver
    @type conf: L{hlr_utils.Configure}

    @param roi_file: The file containing the list of pixel IDs for the
                     region of interest. This only applies to normalization
                     data.
    @type roi_file: C{string}

    @param bkg_roi_file: The file containing the list of pixel IDs for the
                         (possible) background region of interest.
    @type bkg_roi_file: C{string}

    @param no_bkg: (OPTIONAL) Flag which determines if the background will
                   be calculated and subtracted.
    @type no_bkg: C{boolean}

    @param kwargs: A list of keyword arguments that the function accepts:

    @keyword inst_geom_dst: File object that contains instrument geometry
                            information.
    @type inst_geom_dst: C{DST.GeomDST}

    @keyword timer: Timing object so the function can perform timing
                    estimates.
    @type timer: C{sns_timer.DiffTime}

    @return: Object that has undergone all requested processing steps
    @rtype: C{SOM.SOM}
    """
    import hlr_utils
    import common_lib
    import dr_lib

    # Check keywords
    try:
        i_geom_dst = kwargs["inst_geom_dst"]
    except KeyError:
        i_geom_dst = None

    try:
        t = kwargs["timer"]
    except KeyError:
        t = None

    if roi_file is not None:
        # Normalization
        dataset_type = "norm"
    else:
        # Sample data
        dataset_type = "data"

    so_axis = "time_of_flight"

    # Step 0: Open data files and select ROI (if necessary)
    if conf.verbose:
        print "Reading %s file" % dataset_type

    if len(conf.norm_data_paths) and dataset_type == "norm":
        data_path = conf.norm_data_paths.toPath()
    else:
        data_path = conf.data_paths.toPath()

    (d_som1, b_som1) = dr_lib.add_files_bg(datalist,
                                           Data_Paths=data_path,
                                           SO_Axis=so_axis,
                                           dataset_type=dataset_type,
                                           Signal_ROI=roi_file,
                                           Bkg_ROI=bkg_roi_file,
                                           Verbose=conf.verbose,
                                           Timer=t)

    if t is not None:
        t.getTime(msg="After reading %s " % dataset_type)

    # Override geometry if necessary
    if i_geom_dst is not None:
        i_geom_dst.setGeometry(conf.data_paths.toPath(), d_som1)

    if dataset_type == "data":
        # Get TOF bin width
        conf.delta_TOF = d_som1[0].axis[0].val[1] - d_som1[0].axis[0].val[0]

    if conf.mon_norm:
        if conf.verbose:
            print "Reading in monitor data from %s file" % dataset_type

        dm_som1 = dr_lib.add_files(datalist,
                                   Data_Paths=conf.mon_path.toPath(),
                                   SO_Axis=so_axis,
                                   dataset_type=dataset_type,
                                   Verbose=conf.verbose,
                                   Timer=t)

        if t is not None:
            t.getTime(msg="After reading monitor data ")
    else:
        dm_som1 = None

    # Step 1: Sum all spectra along the low resolution direction
    if conf.verbose:
        print "Summing over low resolution direction"

    # Set sorting for REF_L
    (y_sort, cent_pixel) = hlr_utils.get_ref_integration_direction(
        conf.int_dir, conf.inst, d_som1.attr_list.instrument)

    if t is not None:
        t.getTime(False)

    d_som2 = dr_lib.sum_all_spectra(d_som1, y_sort=y_sort, stripe=True,
                                    pixel_fix=cent_pixel)

    if b_som1 is not None:
        b_som2 = dr_lib.sum_all_spectra(b_som1, y_sort=y_sort, stripe=True,
                                        pixel_fix=cent_pixel)
        del b_som1
    else:
        b_som2 = b_som1

    if t is not None:
        t.getTime(msg="After summing low resolution direction ")

    del d_som1

    # Determine background spectrum
    if conf.verbose and not no_bkg:
        print "Determining %s background" % dataset_type

    if b_som2 is not None:
        B = dr_lib.calculate_ref_background(b_som2, no_bkg, conf.inst, None,
                                            aobj=d_som2)
    if t is not None:
        t.getTime(msg="After background determination")

    # Subtract background spectrum from data spectra
    if not no_bkg:
        d_som3 = dr_lib.subtract_bkg_from_data(d_som2, B,
                                               verbose=conf.verbose,
                                               timer=t,
                                               dataset1="data",
                                               dataset2="background")
    else:
        d_som3 = d_som2

    del d_som2

    # Zero the spectra if necessary
    if roi_file is None and (conf.tof_cut_min is not None or \
                             conf.tof_cut_max is not None):
        import utils
        # Find the indices for the non-zero range
        if conf.tof_cut_min is None:
            conf.TOF_min = d_som3[0].axis[0].val[0]
            start_index = 0
        else:
            start_index = utils.bisect_helper(d_som3[0].axis[0].val,
                                              conf.tof_cut_min)

        if conf.tof_cut_max is None:
            conf.TOF_max = d_som3[0].axis[0].val[-1]
            end_index = len(d_som3[0].axis[0].val) - 1
        else:
            end_index = utils.bisect_helper(d_som3[0].axis[0].val,
                                            conf.tof_cut_max)

        nz_list = []
        for i in xrange(hlr_utils.get_length(d_som3)):
            nz_list.append((start_index, end_index))

        d_som4 = dr_lib.zero_spectra(d_som3, nz_list, use_bin_index=True)
    else:
        conf.TOF_min = d_som3[0].axis[0].val[0]
        conf.TOF_max = d_som3[0].axis[0].val[-1]
        d_som4 = d_som3

    del d_som3

    # Step N: Convert TOF to wavelength
    if conf.verbose:
        print "Converting TOF to wavelength"

    if t is not None:
        t.getTime(False)

    d_som5 = common_lib.tof_to_wavelength(d_som4, inst_param="total",
                                          units="microsecond")
    if dm_som1 is not None:
        dm_som2 = common_lib.tof_to_wavelength(dm_som1, units="microsecond")
    else:
        dm_som2 = None

    del dm_som1

    if t is not None:
        t.getTime(msg="After converting TOF to wavelength ")

    del d_som4

    if conf.mon_norm:
        dm_som3 = dr_lib.rebin_monitor(dm_som2, d_som5, rtype="frac")
    else:
        dm_som3 = None

    del dm_som2

    if not conf.mon_norm:
        # Step 2: Multiply the spectra by the proton charge
        if conf.verbose:
            print "Multiply spectra by proton charge"

        pc_tag = dataset_type + "-proton_charge"
        proton_charge = d_som5.attr_list[pc_tag]

        if t is not None:
            t.getTime(False)

        d_som6 = common_lib.div_ncerr(d_som5, (proton_charge.getValue(), 0.0))

        if t is not None:
            t.getTime(msg="After scaling by proton charge ")
    else:
        if conf.verbose:
            print "Normalize by monitor spectrum"

        if t is not None:
            t.getTime(False)

        d_som6 = common_lib.div_ncerr(d_som5, dm_som3)

        if t is not None:
            t.getTime(msg="After monitor normalization ")

    del d_som5, dm_som3

    if roi_file is None:
        return d_som6
    else:
        # Step 3: Make one spectrum for normalization dataset
        # Need to create a final rebinning axis
        pathlength = d_som6.attr_list.instrument.get_total_path(
            det_secondary=True)

        delta_lambda = common_lib.tof_to_wavelength((conf.delta_TOF, 0.0),
                                                    pathlength=pathlength)

        lambda_bins = dr_lib.create_axis_from_data(d_som6,
                                                   width=delta_lambda[0])

        return dr_lib.sum_by_rebin_frac(d_som6, lambda_bins.toNessiList())
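# ----------------------------------------------------------------------------
# Illustrative sketch (not part of dr_lib): the TOF-window step in
# process_reflp_data builds one (start_index, end_index) pair per spectrum so
# that everything outside [tof_cut_min, tof_cut_max] can be zeroed. A
# plain-Python version of that index hunt, using bisect in place of
# utils.bisect_helper, might look like this (names are hypothetical).
# ----------------------------------------------------------------------------
def _sketch_tof_window(tof_axis, tof_cut_min=None, tof_cut_max=None):
    """Return the (start, end) bin indices of the kept TOF window."""
    from bisect import bisect_right

    if tof_cut_min is None:
        start_index = 0
    else:
        start_index = max(bisect_right(tof_axis, tof_cut_min) - 1, 0)

    if tof_cut_max is None:
        end_index = len(tof_axis) - 1
    else:
        end_index = min(bisect_right(tof_axis, tof_cut_max) - 1,
                        len(tof_axis) - 1)

    return (start_index, end_index)

# Example: keep only the bins between 2000 and 6000 microseconds
# print _sketch_tof_window([0.0, 1000.0, 3000.0, 5000.0, 7000.0], 2000.0,
#                          6000.0)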
def dimensionless_mon(obj, min_ext, max_ext, **kwargs):
    """
    This function takes monitor spectra and converts them to dimensionless
    spectra by dividing each spectrum by the total number of counts within
    the range [min_ext, max_ext]. Then, each spectrum is multiplied by the
    quantity max_ext - min_ext. The units of min_ext and max_ext are assumed
    to be the same as the monitor spectra axis.

    @param obj: Object containing monitor spectra
    @type obj: C{SOM.SOM} or C{SOM.SO}

    @param min_ext: Minimum range and associated error^2 for integrating
                    total counts.
    @type min_ext: C{tuple}

    @param max_ext: Maximum range and associated error^2 for integrating
                    total counts.
    @type max_ext: C{tuple}

    @param kwargs: A list of keyword arguments that the function accepts:

    @keyword units: The expected units for this function. The default for
                    this function is I{Angstroms}.
    @type units: C{string}

    @return: Dimensionless monitor spectra
    @rtype: C{SOM.SOM} or C{SOM.SO}
    """
    # import the helper functions
    import hlr_utils

    if obj is None:
        return obj

    # set up for working through data
    (result, res_descr) = hlr_utils.empty_result(obj)
    o_descr = hlr_utils.get_descr(obj)

    # Setup keyword arguments
    try:
        units = kwargs["units"]
    except KeyError:
        units = "Angstroms"

    # Primary axis for transformation. If a SO is passed, the function will
    # assume the axis for transformation is at the 0 position
    if o_descr == "SOM":
        axis = hlr_utils.one_d_units(obj, units)
    else:
        axis = 0

    result = hlr_utils.copy_som_attr(result, res_descr, obj, o_descr)

    import array_manip
    import dr_lib
    import utils

    for i in xrange(hlr_utils.get_length(obj)):
        val = hlr_utils.get_value(obj, i, o_descr, "y")
        err2 = hlr_utils.get_err2(obj, i, o_descr, "y")

        x_axis = hlr_utils.get_value(obj, i, o_descr, "x", axis)
        x_err2 = hlr_utils.get_err2(obj, i, o_descr, "x", axis)

        map_so = hlr_utils.get_map_so(obj, None, i)

        bin_widths = utils.calc_bin_widths(x_axis, x_err2)

        # Scale bin contents by bin width
        value0 = array_manip.mult_ncerr(val, err2,
                                        bin_widths[0], bin_widths[1])

        # Find bin range for extents
        min_index = utils.bisect_helper(x_axis, min_ext[0])
        max_index = utils.bisect_helper(x_axis, max_ext[0])

        # Integrate axis using bin width multiplication
        (asum, asum_err2) = dr_lib.integrate_axis_py(map_so,
                                                     start=min_index,
                                                     end=max_index,
                                                     width=True)

        # Get the number of bins in the integration range
        num_bins = max_index - min_index + 1

        asum /= num_bins
        asum_err2 /= (num_bins * num_bins)

        # Divide by sum
        value1 = array_manip.div_ncerr(value0[0], value0[1],
                                       asum, asum_err2)

        hlr_utils.result_insert(result, res_descr, value1, map_so, "y")

    return result
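# ----------------------------------------------------------------------------
# Illustrative sketch (not part of dr_lib): dimensionless_mon above scales
# each monitor bin by its width and then divides by the per-bin average of
# the integrated counts over [min_ext, max_ext]. A bare-bones version of that
# arithmetic on plain lists (errors omitted, names hypothetical):
# ----------------------------------------------------------------------------
def _sketch_dimensionless_mon(counts, axis, min_ext, max_ext):
    """Return width-weighted counts divided by the windowed average."""
    from bisect import bisect_right

    widths = [axis[j + 1] - axis[j] for j in range(len(counts))]
    weighted = [c * w for c, w in zip(counts, widths)]

    # Window indices, clamped to the valid bin range
    lo = max(bisect_right(axis, min_ext) - 1, 0)
    hi = min(bisect_right(axis, max_ext) - 1, len(counts) - 1)

    num_bins = hi - lo + 1
    average = sum(weighted[lo:hi + 1]) / num_bins

    return [v / average for v in weighted]

# Example: a flat monitor spectrum normalizes to all ones
# print _sketch_dimensionless_mon([10.0, 10.0, 10.0], [0.0, 1.0, 2.0, 3.0],
#                                 0.5, 2.5)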
def cut_spectra(obj, low_cut, high_cut, **kwargs):
    """
    This function takes 1D histogram spectra and a given set of axis cutoff
    values and produces spectra that are smaller than the originals by
    removing information outside the cut range.

    @param obj: The object containing the 1D histogram spectra to be cut
    @type obj: C{SOM.SOM}

    @param low_cut: The low-side axis cutoff. All values less than this will
                    be discarded.
    @type low_cut: C{float}

    @param high_cut: The high-side axis cutoff. All values greater than this
                     will be discarded.
    @type high_cut: C{float}

    @param kwargs: A list of keyword arguments that the function accepts:

    @keyword num_bins_clean: The number of extra bins to cut from the
                             spectra. The operation will be performed
                             symmetrically if both cuts are requested.
    @type num_bins_clean: C{int}

    @return: Object containing the cut spectra
    @rtype: C{SOM.SOM}
    """
    # Kickout if both cuts are None
    if low_cut is None and high_cut is None:
        return obj

    # import the helper functions
    import hlr_utils

    # set up for working through data
    (result, res_descr) = hlr_utils.empty_result(obj)
    o_descr = hlr_utils.get_descr(obj)

    result = hlr_utils.copy_som_attr(result, res_descr, obj, o_descr)

    # Check keywords
    offset = kwargs.get("num_bins_clean", 0)

    # iterate through the values
    import utils

    # Get object length
    len_obj = hlr_utils.get_length(obj)

    for i in xrange(len_obj):
        map_so = hlr_utils.get_map_so(obj, None, i)
        axis = hlr_utils.get_value(obj, i, o_descr, "x", 0)

        if low_cut is None:
            low_bin = 0
        else:
            low_bin = utils.bisect_helper(axis, low_cut)
            # Need to cut directly at the low bin
            low_bin += 1
            low_bin += offset

        if high_cut is None:
            high_bin = len(axis)
        else:
            high_bin = utils.bisect_helper(axis, high_cut)
            high_bin -= offset

        if high_bin != 0:
            # Slice out the requested range
            y_new = map_so.y[low_bin:high_bin]
            var_y_new = map_so.var_y[low_bin:high_bin]
            # Need to increment the high bin for the axis since it carries
            # one more bin than the data
            axis_new = axis[low_bin:high_bin + 1]
        else:
            y_new = map_so.y
            var_y_new = map_so.var_y
            axis_new = axis

        hlr_utils.result_insert(result, res_descr, (y_new, var_y_new),
                                map_so, "all", 0, [axis_new])

    return result
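# ----------------------------------------------------------------------------
# Illustrative sketch (not part of dr_lib): the slicing in cut_spectra has to
# honor the histogram convention that the axis carries one more boundary than
# the data. A stand-alone helper showing the same bookkeeping on plain lists
# (names are hypothetical):
# ----------------------------------------------------------------------------
def _sketch_cut_histogram(counts, axis, low_bin, high_bin):
    """Cut [low_bin, high_bin) from counts and keep matching boundaries."""
    new_counts = counts[low_bin:high_bin]
    # The axis needs one extra entry so the last kept bin stays bounded
    new_axis = axis[low_bin:high_bin + 1]
    return (new_counts, new_axis)

# Example: keeping bins 1 and 2 retains boundaries 1.0 through 3.0
# print _sketch_cut_histogram([5.0, 6.0, 7.0, 8.0], [0.0, 1.0, 2.0, 3.0, 4.0],
#                             1, 3)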