def dimensionless_mon(obj, min_ext, max_ext, **kwargs):
    """
    This function takes monitor spectra and converts them to dimensionless
    spectra by dividing each spectrum by the total number of counts within
    the range [min_ext, max_ext]. Then, each spectrum is multiplied by the
    quantity max_ext - min_ext. The units of min_ext and max_ext are assumed
    to be the same as the monitor spectra axis.

    @param obj: Object containing monitor spectra
    @type obj: C{SOM.SOM} or C{SOM.SO}

    @param min_ext: Minimum of the range and associated error^2 for
                    integrating total counts.
    @type min_ext: C{tuple}

    @param max_ext: Maximum of the range and associated error^2 for
                    integrating total counts.
    @type max_ext: C{tuple}

    @param kwargs: A list of keyword arguments that the function accepts:

    @keyword units: The expected units for this function. The default for
                    this function is I{Angstroms}.
    @type units: C{string}

    @return: Dimensionless monitor spectra
    @rtype: C{SOM.SOM} or C{SOM.SO}
    """
    # import the helper functions
    import hlr_utils

    if obj is None:
        return obj

    # set up for working through data
    (result, res_descr) = hlr_utils.empty_result(obj)
    o_descr = hlr_utils.get_descr(obj)

    # Setup keyword arguments
    try:
        units = kwargs["units"]
    except KeyError:
        units = "Angstroms"

    # Primary axis for transformation. If a SO is passed, the function will
    # assume the axis for transformation is at the 0 position
    if o_descr == "SOM":
        axis = hlr_utils.one_d_units(obj, units)
    else:
        axis = 0

    result = hlr_utils.copy_som_attr(result, res_descr, obj, o_descr)

    import array_manip
    import dr_lib
    import utils

    for i in xrange(hlr_utils.get_length(obj)):
        val = hlr_utils.get_value(obj, i, o_descr, "y")
        err2 = hlr_utils.get_err2(obj, i, o_descr, "y")

        x_axis = hlr_utils.get_value(obj, i, o_descr, "x", axis)
        x_err2 = hlr_utils.get_err2(obj, i, o_descr, "x", axis)

        map_so = hlr_utils.get_map_so(obj, None, i)

        bin_widths = utils.calc_bin_widths(x_axis, x_err2)

        # Scale bin contents by bin width
        value0 = array_manip.mult_ncerr(val, err2,
                                        bin_widths[0], bin_widths[1])

        # Find bin range for extents
        min_index = utils.bisect_helper(x_axis, min_ext[0])
        max_index = utils.bisect_helper(x_axis, max_ext[0])

        # Integrate axis using bin width multiplication
        (asum, asum_err2) = dr_lib.integrate_axis_py(map_so, start=min_index,
                                                     end=max_index,
                                                     width=True)

        # Get the number of bins in the integration range
        num_bins = max_index - min_index + 1

        asum /= num_bins
        asum_err2 /= (num_bins * num_bins)

        # Divide by sum
        value1 = array_manip.div_ncerr(value0[0], value0[1],
                                       asum, asum_err2)

        hlr_utils.result_insert(result, res_descr, value1, map_so, "y")

    return result
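# ---------------------------------------------------------------------------
# Illustrative sketch (not part of the original module): one way a driver
# might call dimensionless_mon. The monitor SOM is assumed to come from an
# upstream reader and to carry a wavelength axis in Angstroms;
# get_monitor_som() is a hypothetical placeholder for that step, not a real
# helper in this package.
# ---------------------------------------------------------------------------
def _example_dimensionless_mon():
    import dr_lib

    mon_som = get_monitor_som()        # hypothetical upstream reader
    # Extents are (value, error^2) pairs in the same units as the axis
    min_ext = (6.24, 0.0)
    max_ext = (6.30, 0.0)
    return dr_lib.dimensionless_mon(mon_som, min_ext, max_ext,
                                    units="Angstroms")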
def create_param_vs_Y(som, param, param_func, param_axis, **kwargs):
    """
    This function takes a group of single spectra with any given axes
    (wavelength, energy, etc.). The function can optionally rebin those axes
    to a given axis. It then creates a 2D spectrum by using a parameter, a
    parameter function and a given axis for the lookup locations, and places
    each original spectrum in the found location.

    @param som: The input object with arbitrary (but same) axis spectra
    @type som: C{SOM.SOM}

    @param param: The parameter that will be used for creating the lookups.
    @type param: C{string}

    @param param_func: The function that will convert the parameter into the
                       values for lookups.
    @type param_func: C{string}

    @param param_axis: The axis that will be searched for the lookup values.
    @type param_axis: C{nessi_list.NessiList}

    @param kwargs: A list of keyword arguments that the function accepts:

    @keyword rebin_axis: An axis to rebin the given spectra to.
    @type rebin_axis: C{nessi_list.NessiList}

    @keyword data_type: The name of the data type which can be either
                        I{histogram}, I{density} or I{coordinate}. The
                        default value will be I{histogram}.
    @type data_type: C{string}

    @keyword pixnorm: A flag to track the number of pixels that contribute to
                      a bin and then normalize the bin by that number.
    @type pixnorm: C{boolean}

    @keyword prnorm: A parameter to track and determine a range (max - min)
                     for each bin of the requested parameter axis. The range
                     will then be divided into the final summed spectrum for
                     the given bin.
    @type prnorm: C{string}

    @keyword binnorm: A flag that turns on the scaling of each stripe of the
                      y-axis by the individual bin widths from the y-axis.
    @type binnorm: C{boolean}

    @keyword so_id: The identifier represents a number, string, tuple or
                    other object that describes the resulting C{SO}.
    @type so_id: C{int}, C{string}, C{tuple}, C{pixel ID}

    @keyword y_label: The dependent axis label
    @type y_label: C{string}

    @keyword y_units: The dependent axis units
    @type y_units: C{string}

    @keyword x_labels: The two independent axis labels
    @type x_labels: C{list} of C{string}s

    @keyword x_units: The two independent axis units
    @type x_units: C{list} of C{string}s

    @return: A two dimensional spectrum with the parameter as the x-axis and
             the given spectra axes as the y-axis.
    @rtype: C{SOM.SOM}
    """
    import array_manip
    import dr_lib
    import hlr_utils
    import nessi_list
    import SOM
    import utils

    # Check for rebinning axis
    try:
        rebin_axis = kwargs["rebin_axis"]
    except KeyError:
        rebin_axis = None

    # Check for pixnorm flag
    try:
        pixnorm = kwargs["pixnorm"]
    except KeyError:
        pixnorm = False

    try:
        binnorm = kwargs["binnorm"]
    except KeyError:
        binnorm = False

    # Check for prnorm flag
    try:
        prpar = kwargs["prnorm"]
        prnorm = True
    except KeyError:
        prnorm = False

    # Check dataType keyword argument. An offset will be set to 1 for the
    # histogram type and 0 for either density or coordinate
    try:
        data_type = kwargs["data_type"]
        if data_type.lower() == "histogram":
            offset = 1
        elif data_type.lower() == "density" or \
                 data_type.lower() == "coordinate":
            offset = 0
        else:
            raise RuntimeError("Do not understand data type given: %s" % \
                               data_type)
    # Default is offset for histogram
    except KeyError:
        offset = 1

    # Setup some variables
    dim = 2
    N_tot = 1

    # Create 2D spectrum object
    so_dim = SOM.SO(dim)

    # Set the axis locations
    param_axis_loc = 0
    arb_axis_loc = 1

    # Rebin original data to rebin_axis if necessary
    if rebin_axis is not None:
        (som1, som2) = dr_lib.rebin_axis_1D_frac(som, rebin_axis)
        len_arb_axis = len(rebin_axis) - offset
        so_dim.axis[arb_axis_loc].val = rebin_axis
    else:
        som1 = som
        len_arb_axis = len(som[0].axis[0].val) - offset
        so_dim.axis[arb_axis_loc].val = som[0].axis[0].val

    del som

    # Get parameter axis information
    len_param_axis = len(param_axis) - offset
    so_dim.axis[param_axis_loc].val = param_axis

    if pixnorm:
        pixarr = nessi_list.NessiList(len_param_axis)

    if prnorm:
        prarr = []
        for i in xrange(len_param_axis):
            prarr.append(nessi_list.NessiList())
        # Get the parameters for all the spectra
        ppfunc = hlr_utils.__getattribute__("param_array")
        prarr_lookup = ppfunc(som1, prpar)

    # Get the parameter lookup array
    pfunc = hlr_utils.__getattribute__(param_func)
    lookup_array = pfunc(som1, param)

    # Create y and var_y lists from total 2D size
    N_tot = len_param_axis * len_arb_axis
    so_dim.y = nessi_list.NessiList(N_tot)
    so_dim.var_y = nessi_list.NessiList(N_tot)

    if rebin_axis is not None:
        frac_area = nessi_list.NessiList(N_tot)
        frac_area_err2 = nessi_list.NessiList(N_tot)

    # Loop through data and create 2D spectrum
    len_som = hlr_utils.get_length(som1)
    for i in xrange(len_som):
        val = hlr_utils.get_value(som1, i, "SOM", "y")
        err2 = hlr_utils.get_err2(som1, i, "SOM", "y")

        bin_index = utils.bisect_helper(param_axis, lookup_array[i])
        start = bin_index * len_arb_axis

        if pixnorm:
            pixarr[bin_index] += 1

        if prnorm:
            prarr[bin_index].append(prarr_lookup[i])

        (so_dim.y, so_dim.var_y) = array_manip.add_ncerr(so_dim.y,
                                                         so_dim.var_y,
                                                         val, err2,
                                                         a_start=start)

        if rebin_axis is not None:
            val1 = hlr_utils.get_value(som2, i, "SOM", "y")
            err1_2 = hlr_utils.get_err2(som2, i, "SOM", "y")
            (frac_area, frac_area_err2) = array_manip.add_ncerr(
                frac_area, frac_area_err2, val1, err1_2, a_start=start)

    if rebin_axis is not None:
        (so_dim.y, so_dim.var_y) = array_manip.div_ncerr(so_dim.y,
                                                         so_dim.var_y,
                                                         frac_area,
                                                         frac_area_err2)

    # If parameter range normalization enabled, find the range for the
    # parameter
    if prnorm:
        import math
        prrange = nessi_list.NessiList(len_param_axis)
        for i in xrange(len(prrange)):
            try:
                max_val = max(prarr[i])
            except ValueError:
                max_val = 0.0
            try:
                min_val = min(prarr[i])
            except ValueError:
                min_val = 0.0
            prrange[i] = math.fabs(max_val - min_val)

    # If pixel normalization tracking enabled, divide slices by pixel counts
    if pixnorm or prnorm:
        tmp_y = nessi_list.NessiList(N_tot)
        tmp_var_y = nessi_list.NessiList(N_tot)

        for i in range(len_param_axis):
            start = i * len_arb_axis
            end = (i + 1) * len_arb_axis

            slice_y = so_dim.y[start:end]
            slice_var_y = so_dim.var_y[start:end]

            divconst = 1.0

            if pixnorm:
                divconst *= pixarr[i]
            # Scale division constant if parameter range normalization enabled
            if prnorm:
                divconst *= prrange[i]

            (dslice_y, dslice_var_y) = array_manip.div_ncerr(slice_y,
                                                             slice_var_y,
                                                             divconst, 0.0)

            (tmp_y, tmp_var_y) = array_manip.add_ncerr(tmp_y, tmp_var_y,
                                                       dslice_y,
                                                       dslice_var_y,
                                                       a_start=start)

        so_dim.y = tmp_y
        so_dim.var_y = tmp_var_y

    if binnorm:
        tmp_y = nessi_list.NessiList(N_tot)
        tmp_var_y = nessi_list.NessiList(N_tot)

        if rebin_axis is not None:
            bin_const = utils.calc_bin_widths(rebin_axis)
        else:
            bin_const = utils.calc_bin_widths(som1[0].axis[1].val)

        for i in range(len_param_axis):
            start = i * len_arb_axis
            end = (i + 1) * len_arb_axis

            slice_y = so_dim.y[start:end]
            slice_var_y = so_dim.var_y[start:end]

            (dslice_y, dslice_var_y) = array_manip.mult_ncerr(slice_y,
                                                              slice_var_y,
                                                              bin_const[0],
                                                              bin_const[1])

            (tmp_y, tmp_var_y) = array_manip.add_ncerr(tmp_y, tmp_var_y,
                                                       dslice_y,
                                                       dslice_var_y,
                                                       a_start=start)

        so_dim.y = tmp_y
        so_dim.var_y = tmp_var_y

    # Create final 2D spectrum object container
    comb_som = SOM.SOM()
    comb_som.copyAttributes(som1)

    del som1

    # Check for so_id keyword argument
    try:
        so_dim.id = kwargs["so_id"]
    except KeyError:
        so_dim.id = 0

    # Check for y_label keyword argument
    try:
        comb_som.setYLabel(kwargs["y_label"])
    except KeyError:
        comb_som.setYLabel("Counts")

    # Check for y_units keyword argument
    try:
        comb_som.setYUnits(kwargs["y_units"])
    except KeyError:
        comb_som.setYUnits("Counts / Arb")

    # Check for x_labels keyword argument
    try:
        comb_som.setAllAxisLabels(kwargs["x_labels"])
    except KeyError:
        comb_som.setAllAxisLabels(["Parameter", "Arbitrary"])

    # Check for x_units keyword argument
    try:
        comb_som.setAllAxisUnits(kwargs["x_units"])
    except KeyError:
        comb_som.setAllAxisUnits(["Arb", "Arb"])

    comb_som.append(so_dim)

    del so_dim

    return comb_som
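# ---------------------------------------------------------------------------
# Illustrative sketch (not part of the original module): a possible call to
# create_param_vs_Y that bins spectra by a per-pixel parameter. The parameter
# name "polar", the lookup function name "param_array" and the helper
# get_reduced_som() are assumptions used only to show the argument pattern;
# check which lookup functions hlr_utils actually provides in your
# installation before relying on these names.
# ---------------------------------------------------------------------------
def _example_create_param_vs_Y():
    import dr_lib
    import nessi_list

    som = get_reduced_som()            # hypothetical upstream step

    # Parameter axis: histogram edges, 10-degree-wide bins in polar angle
    param_axis = nessi_list.NessiList()
    for k in xrange(10):
        param_axis.append(float(k) * 10.0)

    return dr_lib.create_param_vs_Y(som, "polar", "param_array", param_axis,
                                    data_type="histogram",
                                    y_label="Counts",
                                    x_labels=["Polar Angle", "Wavelength"],
                                    x_units=["degrees", "Angstroms"])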
def create_E_vs_Q_dgs(som, E_i, Q_final, **kwargs):
    """
    This function starts with the rebinned energy transfer and turns this
    into a 2D spectrum with E and Q axes for DGS instruments.

    @param som: The input object with the rebinned energy transfer axis
    @type som: C{SOM.SOM}

    @param E_i: The initial energy for the given data.
    @type E_i: C{tuple}

    @param Q_final: The momentum transfer axis to rebin the data to
    @type Q_final: C{nessi_list.NessiList}

    @param kwargs: A list of keyword arguments that the function accepts:

    @keyword corner_angles: The object that contains the corner geometry
                            information.
    @type corner_angles: C{dict}

    @keyword so_id: The identifier represents a number, string, tuple or
                    other object that describes the resulting C{SO}
    @type so_id: C{int}, C{string}, C{tuple}, C{pixel ID}

    @keyword y_label: The y axis label
    @type y_label: C{string}

    @keyword y_units: The y axis units
    @type y_units: C{string}

    @keyword x_labels: This is a list of names that sets the individual x
                       axis labels
    @type x_labels: C{list} of C{string}s

    @keyword x_units: This is a list of names that sets the individual x
                      axis units
    @type x_units: C{list} of C{string}s

    @keyword split: This flag causes the counts and the fractional area to
                    be written out into separate files.
    @type split: C{boolean}

    @keyword configure: This is the object containing the driver
                        configuration.
    @type configure: C{Configure}

    @return: Object containing a 2D C{SO} with E and Q axes
    @rtype: C{SOM.SOM}
    """
    import array_manip
    import axis_manip
    import common_lib
    import hlr_utils
    import nessi_list
    import SOM
    import utils

    # Check for keywords
    corner_angles = kwargs["corner_angles"]
    configure = kwargs.get("configure")
    split = kwargs.get("split", False)

    # Setup output object
    so_dim = SOM.SO(2)

    so_dim.axis[0].val = Q_final
    so_dim.axis[1].val = som[0].axis[0].val # E_t

    # Calculate total 2D array size
    N_tot = (len(so_dim.axis[0].val) - 1) * (len(so_dim.axis[1].val) - 1)

    # Create y and var_y lists from total 2D size
    so_dim.y = nessi_list.NessiList(N_tot)
    so_dim.var_y = nessi_list.NessiList(N_tot)

    # Create area sum and errors for the area sum lists from total 2D size
    area_sum = nessi_list.NessiList(N_tot)
    area_sum_err2 = nessi_list.NessiList(N_tot)

    # Convert initial energy to initial wavevector
    l_i = common_lib.energy_to_wavelength(E_i)
    k_i = common_lib.wavelength_to_scalar_k(l_i)

    # Since all the data is rebinned to the same energy transfer axis, we can
    # calculate the final energy axis once
    E_t = som[0].axis[0].val
    if som[0].axis[0].var is not None:
        E_t_err2 = som[0].axis[0].var
    else:
        E_t_err2 = nessi_list.NessiList(len(E_t))

    # Get the bin width arrays from E_t
    (E_t_bw, E_t_bw_err2) = utils.calc_bin_widths(E_t)

    E_f = array_manip.sub_ncerr(E_i[0], E_i[1], E_t, E_t_err2)

    # Now we can get the final wavevector
    l_f = axis_manip.energy_to_wavelength(E_f[0], E_f[1])
    k_f = axis_manip.wavelength_to_scalar_k(l_f[0], l_f[1])

    # Output position for Q
    X = 0

    # Iterate though the data
    len_som = hlr_utils.get_length(som)
    for i in xrange(len_som):
        map_so = hlr_utils.get_map_so(som, None, i)

        yval = hlr_utils.get_value(som, i, "SOM", "y")
        yerr2 = hlr_utils.get_err2(som, i, "SOM", "y")

        cangles = corner_angles[str(map_so.id)]

        avg_theta1 = (cangles.getPolar(0) + cangles.getPolar(1)) / 2.0
        avg_theta2 = (cangles.getPolar(2) + cangles.getPolar(3)) / 2.0

        Q1 = axis_manip.init_scatt_wavevector_to_scalar_Q(k_i[0], k_i[1],
                                                          k_f[0][:-1],
                                                          k_f[1][:-1],
                                                          avg_theta2, 0.0)

        Q2 = axis_manip.init_scatt_wavevector_to_scalar_Q(k_i[0], k_i[1],
                                                          k_f[0][:-1],
                                                          k_f[1][:-1],
                                                          avg_theta1, 0.0)

        Q3 = axis_manip.init_scatt_wavevector_to_scalar_Q(k_i[0], k_i[1],
                                                          k_f[0][1:],
                                                          k_f[1][1:],
                                                          avg_theta1, 0.0)

        Q4 = axis_manip.init_scatt_wavevector_to_scalar_Q(k_i[0], k_i[1],
                                                          k_f[0][1:],
                                                          k_f[1][1:],
                                                          avg_theta2, 0.0)

        # Calculate the area of the E,Q polygons
        (A, A_err2) = utils.calc_eq_jacobian_dgs(E_t[:-1], E_t[:-1],
                                                 E_t[1:], E_t[1:],
                                                 Q1[X], Q2[X], Q3[X], Q4[X])

        # Apply the Jacobian: C/dE_t * dE_t / A(EQ) = C/A(EQ)
        (jac_ratio, jac_ratio_err2) = array_manip.div_ncerr(E_t_bw,
                                                            E_t_bw_err2,
                                                            A, A_err2)
        (counts, counts_err2) = array_manip.mult_ncerr(yval, yerr2,
                                                       jac_ratio,
                                                       jac_ratio_err2)

        try:
            (y_2d, y_2d_err2,
             area_new,
             bin_count) = axis_manip.rebin_2D_quad_to_rectlin(
                 Q1[X], E_t[:-1], Q2[X], E_t[:-1],
                 Q3[X], E_t[1:], Q4[X], E_t[1:],
                 counts, counts_err2,
                 so_dim.axis[0].val, so_dim.axis[1].val)
            del bin_count
        except IndexError, e:
            # Get the offending index from the error message
            index = int(str(e).split()[1].split('index')[-1].strip('[]'))
            print "Id:", map_so.id
            print "Index:", index
            print "Verticies: %f, %f, %f, %f, %f, %f, %f, %f" % \
                  (Q1[X][index], E_t[:-1][index],
                   Q2[X][index], E_t[:-1][index],
                   Q3[X][index], E_t[1:][index],
                   Q4[X][index], E_t[1:][index])
            raise IndexError(str(e))

        # Add in together with previous results
        (so_dim.y, so_dim.var_y) = array_manip.add_ncerr(so_dim.y,
                                                         so_dim.var_y,
                                                         y_2d, y_2d_err2)

        (area_sum, area_sum_err2) = array_manip.add_ncerr(area_sum,
                                                          area_sum_err2,
                                                          area_new,
                                                          area_sum_err2)
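# ---------------------------------------------------------------------------
# Illustrative sketch (not part of the original module): the call pattern for
# create_E_vs_Q_dgs. The incoming SOM is assumed to already be rebinned onto
# a common energy transfer axis, and the corner_angles dictionary keyed by
# pixel ID is assumed to come from the instrument geometry step; both helper
# names below are hypothetical placeholders.
# ---------------------------------------------------------------------------
def _example_create_E_vs_Q_dgs():
    import dr_lib
    import nessi_list

    som = get_energy_transfer_som()          # hypothetical upstream step
    corner_angles = get_corner_geometry()    # hypothetical geometry lookup

    # Initial energy as (value, error^2) in the units expected by the driver
    E_i = (45.0, 0.1)

    # Momentum transfer axis to rebin onto: 0 to 10 in steps of 0.1
    Q_final = nessi_list.NessiList()
    for k in xrange(101):
        Q_final.append(float(k) * 0.1)

    return dr_lib.create_E_vs_Q_dgs(som, E_i, Q_final,
                                    corner_angles=corner_angles,
                                    so_id="Full Detector")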
def create_E_vs_Q_igs(som, *args, **kwargs):
    """
    This function starts with the initial IGS wavelength axis and turns this
    into a 2D spectrum with E and Q axes.

    @param som: The input object with initial IGS wavelength axis
    @type som: C{SOM.SOM}

    @param args: A mandatory list of axes for rebinning. There is a
                 particular order to them. They should be present in the
                 following order:

                 Without errors
                   1. Energy transfer
                   2. Momentum transfer

                 With errors
                   1. Energy transfer
                   2. Energy transfer error^2
                   3. Momentum transfer
                   4. Momentum transfer error^2
    @type args: C{nessi_list.NessiList}s

    @param kwargs: A list of keyword arguments that the function accepts:

    @keyword withXVar: Flag for whether the function should be expecting the
                       associated axes to have errors. The default value
                       will be I{False}.
    @type withXVar: C{boolean}

    @keyword data_type: Name of the data type which can be either
                        I{histogram}, I{density} or I{coordinate}. The
                        default value will be I{histogram}
    @type data_type: C{string}

    @keyword Q_filter: Flag to turn on or off Q filtering. The default
                       behavior is I{True}.
    @type Q_filter: C{boolean}

    @keyword so_id: The identifier represents a number, string, tuple or
                    other object that describes the resulting C{SO}
    @type so_id: C{int}, C{string}, C{tuple}, C{pixel ID}

    @keyword y_label: The y axis label
    @type y_label: C{string}

    @keyword y_units: The y axis units
    @type y_units: C{string}

    @keyword x_labels: This is a list of names that sets the individual x
                       axis labels
    @type x_labels: C{list} of C{string}s

    @keyword x_units: This is a list of names that sets the individual x
                      axis units
    @type x_units: C{list} of C{string}s

    @keyword split: This flag causes the counts and the fractional area to
                    be written out into separate files.
    @type split: C{boolean}

    @keyword configure: This is the object containing the driver
                        configuration.
    @type configure: C{Configure}

    @return: Object containing a 2D C{SO} with E and Q axes
    @rtype: C{SOM.SOM}

    @raise RuntimeError: Anything other than a C{SOM} is passed to the
                         function

    @raise RuntimeError: An instrument is not contained in the C{SOM}
    """
    import array_manip
    import axis_manip
    import hlr_utils
    import nessi_list
    import SOM

    # Setup some variables
    dim = 2
    N_y = []
    N_tot = 1
    N_args = len(args)

    # Get T0 slope in order to calculate dT = dT_i + dT_0
    try:
        t_0_slope = som.attr_list["Time_zero_slope"][0]
        t_0_slope_err2 = som.attr_list["Time_zero_slope"][1]
    except KeyError:
        t_0_slope = float(0.0)
        t_0_slope_err2 = float(0.0)

    # Check withXVar keyword argument and also check number of given args.
    # Set xvar to the appropriate value
    try:
        value = kwargs["withXVar"]
        if value.lower() == "true":
            if N_args != 4:
                raise RuntimeError("Since you have requested x errors, 4 x "\
                                   +"axes must be provided.")
            else:
                xvar = True
        elif value.lower() == "false":
            if N_args != 2:
                raise RuntimeError("Since you did not request x errors, 2 "\
                                   +"x axes must be provided.")
            else:
                xvar = False
        else:
            raise RuntimeError("Do not understand given parameter %s" % \
                               value)
    except KeyError:
        if N_args != 2:
            raise RuntimeError("Since you did not request x errors, 2 "\
                               +"x axes must be provided.")
        else:
            xvar = False

    # Check dataType keyword argument. An offset will be set to 1 for the
    # histogram type and 0 for either density or coordinate
    try:
        data_type = kwargs["data_type"]
        if data_type.lower() == "histogram":
            offset = 1
        elif data_type.lower() == "density" or \
                 data_type.lower() == "coordinate":
            offset = 0
        else:
            raise RuntimeError("Do not understand data type given: %s" % \
                               data_type)
    # Default is offset for histogram
    except KeyError:
        offset = 1

    try:
        Q_filter = kwargs["Q_filter"]
    except KeyError:
        Q_filter = True

    # Check for split keyword
    try:
        split = kwargs["split"]
    except KeyError:
        split = False

    # Check for configure keyword
    try:
        configure = kwargs["configure"]
    except KeyError:
        configure = None

    so_dim = SOM.SO(dim)

    for i in range(dim):
        # Set the x-axis arguments from the *args list into the new SO
        if not xvar:
            # Axis positions are 1 (Q) and 0 (E)
            position = dim - i - 1
            so_dim.axis[i].val = args[position]
        else:
            # Axis positions are 2 (Q), 3 (eQ), 0 (E), 1 (eE)
            position = dim - 2 * i
            so_dim.axis[i].val = args[position]
            so_dim.axis[i].var = args[position + 1]

        # Set individual value axis sizes (not x-axis size)
        N_y.append(len(args[position]) - offset)

        # Calculate total 2D array size
        N_tot = N_tot * N_y[-1]

    # Create y and var_y lists from total 2D size
    so_dim.y = nessi_list.NessiList(N_tot)
    so_dim.var_y = nessi_list.NessiList(N_tot)

    # Create area sum and errors for the area sum lists from total 2D size
    area_sum = nessi_list.NessiList(N_tot)
    area_sum_err2 = nessi_list.NessiList(N_tot)

    # Create bin count and errors for the bin count lists from total 2D size
    bin_count = nessi_list.NessiList(N_tot)
    bin_count_err2 = nessi_list.NessiList(N_tot)

    inst = som.attr_list.instrument
    lambda_final = som.attr_list["Wavelength_final"]
    inst_name = inst.get_name()

    import bisect
    import math

    import dr_lib
    import utils

    arr_len = 0
    #: Vector of zeros for function calculations
    zero_vec = None

    for j in xrange(hlr_utils.get_length(som)):
        # Get counts
        counts = hlr_utils.get_value(som, j, "SOM", "y")
        counts_err2 = hlr_utils.get_err2(som, j, "SOM", "y")

        arr_len = len(counts)
        zero_vec = nessi_list.NessiList(arr_len)

        # Get mapping SO
        map_so = hlr_utils.get_map_so(som, None, j)

        # Get lambda_i
        l_i = hlr_utils.get_value(som, j, "SOM", "x")
        l_i_err2 = hlr_utils.get_err2(som, j, "SOM", "x")

        # Get lambda_f from instrument information
        l_f_tuple = hlr_utils.get_special(lambda_final, map_so)
        l_f = l_f_tuple[0]
        l_f_err2 = l_f_tuple[1]

        # Get source to sample distance
        (L_s, L_s_err2) = hlr_utils.get_parameter("primary", map_so, inst)

        # Get sample to detector distance
        L_d_tuple = hlr_utils.get_parameter("secondary", map_so, inst)
        L_d = L_d_tuple[0]

        # Get polar angle from instrument information
        (angle, angle_err2) = hlr_utils.get_parameter("polar", map_so, inst)

        # Get the detector pixel height
        dh_tuple = hlr_utils.get_parameter("dh", map_so, inst)
        dh = dh_tuple[0]
        # Need dh in units of Angstrom
        dh *= 1e10

        # Calculate T_i
        (T_i, T_i_err2) = axis_manip.wavelength_to_tof(l_i, l_i_err2,
                                                       L_s, L_s_err2)

        # Scale counts by lambda_f / lambda_i
        (l_i_bc, l_i_bc_err2) = utils.calc_bin_centers(l_i, l_i_err2)

        (ratio, ratio_err2) = array_manip.div_ncerr(l_f, l_f_err2,
                                                    l_i_bc, l_i_bc_err2)

        (counts, counts_err2) = array_manip.mult_ncerr(counts, counts_err2,
                                                       ratio, ratio_err2)

        # Calculate E_i
        (E_i, E_i_err2) = axis_manip.wavelength_to_energy(l_i, l_i_err2)

        # Calculate E_f
        (E_f, E_f_err2) = axis_manip.wavelength_to_energy(l_f, l_f_err2)

        # Calculate E_t
        (E_t, E_t_err2) = array_manip.sub_ncerr(E_i, E_i_err2,
                                                E_f, E_f_err2)

        if inst_name == "BSS":
            # Convert E_t from meV to ueV
            (E_t, E_t_err2) = array_manip.mult_ncerr(E_t, E_t_err2,
                                                     1000.0, 0.0)
            (counts, counts_err2) = array_manip.mult_ncerr(counts,
                                                           counts_err2,
                                                           1.0/1000.0, 0.0)

        # Convert lambda_i to k_i
        (k_i, k_i_err2) = axis_manip.wavelength_to_scalar_k(l_i, l_i_err2)

        # Convert lambda_f to k_f
        (k_f, k_f_err2) = axis_manip.wavelength_to_scalar_k(l_f, l_f_err2)

        # Convert k_i and k_f to Q
        (Q, Q_err2) = axis_manip.init_scatt_wavevector_to_scalar_Q(
            k_i, k_i_err2, k_f, k_f_err2, angle, angle_err2)

        # Calculate dT = dT_0 + dT_i
        dT_i = utils.calc_bin_widths(T_i, T_i_err2)

        (l_i_bw, l_i_bw_err2) = utils.calc_bin_widths(l_i, l_i_err2)
        dT_0 = array_manip.mult_ncerr(l_i_bw, l_i_bw_err2,
                                      t_0_slope, t_0_slope_err2)

        dT_tuple = array_manip.add_ncerr(dT_i[0], dT_i[1], dT_0[0], dT_0[1])
        dT = dT_tuple[0]

        # Calculate Jacobian
        if inst_name == "BSS":
            (x_1, x_2, x_3, x_4) = dr_lib.calc_BSS_coeffs(map_so, inst,
                                                          (E_i, E_i_err2),
                                                          (Q, Q_err2),
                                                          (k_i, k_i_err2),
                                                          (T_i, T_i_err2),
                                                          dh, angle, E_f,
                                                          k_f, l_f, L_s,
                                                          L_d, t_0_slope,
                                                          zero_vec)
        else:
            raise RuntimeError("Do not know how to calculate x_i "\
                               +"coefficients for instrument %s" % inst_name)

        (A, A_err2) = dr_lib.calc_EQ_Jacobian(x_1, x_2, x_3, x_4,
                                              dT, dh, zero_vec)

        # Apply Jacobian: C/dlam * dlam / A(EQ) = C/EQ
        (jac_ratio, jac_ratio_err2) = array_manip.div_ncerr(l_i_bw,
                                                            l_i_bw_err2,
                                                            A, A_err2)
        (counts, counts_err2) = array_manip.mult_ncerr(counts, counts_err2,
                                                       jac_ratio,
                                                       jac_ratio_err2)

        # Reverse counts, E_t, k_i and Q
        E_t = axis_manip.reverse_array_cp(E_t)
        E_t_err2 = axis_manip.reverse_array_cp(E_t_err2)
        Q = axis_manip.reverse_array_cp(Q)
        Q_err2 = axis_manip.reverse_array_cp(Q_err2)
        counts = axis_manip.reverse_array_cp(counts)
        counts_err2 = axis_manip.reverse_array_cp(counts_err2)
        k_i = axis_manip.reverse_array_cp(k_i)
        x_1 = axis_manip.reverse_array_cp(x_1)
        x_2 = axis_manip.reverse_array_cp(x_2)
        x_3 = axis_manip.reverse_array_cp(x_3)
        x_4 = axis_manip.reverse_array_cp(x_4)
        dT = axis_manip.reverse_array_cp(dT)

        # Filter for duplicate Q values
        if Q_filter:
            k_i_cutoff = k_f * math.cos(angle)
            k_i_cutbin = bisect.bisect(k_i, k_i_cutoff)

            counts.__delslice__(0, k_i_cutbin)
            counts_err2.__delslice__(0, k_i_cutbin)
            Q.__delslice__(0, k_i_cutbin)
            Q_err2.__delslice__(0, k_i_cutbin)
            E_t.__delslice__(0, k_i_cutbin)
            E_t_err2.__delslice__(0, k_i_cutbin)
            x_1.__delslice__(0, k_i_cutbin)
            x_2.__delslice__(0, k_i_cutbin)
            x_3.__delslice__(0, k_i_cutbin)
            x_4.__delslice__(0, k_i_cutbin)
            dT.__delslice__(0, k_i_cutbin)
            zero_vec.__delslice__(0, k_i_cutbin)

        try:
            if inst_name == "BSS":
                ((Q_1, E_t_1),
                 (Q_2, E_t_2),
                 (Q_3, E_t_3),
                 (Q_4, E_t_4)) = dr_lib.calc_BSS_EQ_verticies(
                     (E_t, E_t_err2), (Q, Q_err2), x_1, x_2, x_3, x_4,
                     dT, dh, zero_vec)
            else:
                raise RuntimeError("Do not know how to calculate (Q_i, "\
                                   +"E_t_i) verticies for instrument %s" \
                                   % inst_name)
        except IndexError:
            # All the data got Q filtered, move on
            continue

        try:
            (y_2d, y_2d_err2,
             area_new,
             bin_count_new) = axis_manip.rebin_2D_quad_to_rectlin(
                 Q_1, E_t_1, Q_2, E_t_2, Q_3, E_t_3, Q_4, E_t_4,
                 counts, counts_err2,
                 so_dim.axis[0].val, so_dim.axis[1].val)
        except IndexError, e:
            # Get the offending index from the error message
            index = int(str(e).split()[1].split('index')[-1].strip('[]'))
            print "Id:", map_so.id
            print "Index:", index
            print "Verticies: %f, %f, %f, %f, %f, %f, %f, %f" % \
                  (Q_1[index], E_t_1[index], Q_2[index], E_t_2[index],
                   Q_3[index], E_t_3[index], Q_4[index], E_t_4[index])
            raise IndexError(str(e))

        # Add in together with previous results
        (so_dim.y, so_dim.var_y) = array_manip.add_ncerr(so_dim.y,
                                                         so_dim.var_y,
                                                         y_2d, y_2d_err2)

        (area_sum, area_sum_err2) = array_manip.add_ncerr(area_sum,
                                                          area_sum_err2,
                                                          area_new,
                                                          area_sum_err2)

        if configure.dump_pix_contrib or configure.scale_sqe:
            if inst_name == "BSS":
                dOmega = dr_lib.calc_BSS_solid_angle(map_so, inst)
                (bin_count_new,
                 bin_count_err2) = array_manip.mult_ncerr(bin_count_new,
                                                          bin_count_err2,
                                                          dOmega, 0.0)

            (bin_count, bin_count_err2) = array_manip.add_ncerr(
                bin_count, bin_count_err2, bin_count_new, bin_count_err2)
        else:
            del bin_count_new
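# ---------------------------------------------------------------------------
# Illustrative sketch (not part of the original module): calling
# create_E_vs_Q_igs without x-axis errors, so exactly two axes (energy
# transfer first, momentum transfer second) are passed as positional
# arguments. get_igs_som() and the literal axis ranges are assumptions used
# only to show the argument pattern; the driver configuration object is
# assumed to provide the dump_pix_contrib and scale_sqe attributes the
# function reads.
# ---------------------------------------------------------------------------
def _example_create_E_vs_Q_igs(config):
    import dr_lib
    import nessi_list

    som = get_igs_som()                      # hypothetical upstream step

    # Energy transfer axis (ueV) and momentum transfer axis (1/Angstroms)
    E_t_axis = nessi_list.NessiList()
    for k in xrange(201):
        E_t_axis.append(-500.0 + float(k) * 5.0)

    Q_axis = nessi_list.NessiList()
    for k in xrange(21):
        Q_axis.append(float(k) * 0.1)

    return dr_lib.create_E_vs_Q_igs(som, E_t_axis, Q_axis,
                                    withXVar="False",
                                    data_type="histogram",
                                    Q_filter=True,
                                    configure=config)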
def fix_bin_contents(obj, **kwargs):
    """
    This function takes a SOM or SO and goes through the individual spectra
    adjusting the bin contents by either multiplying or dividing by the bin
    widths or the bin centers taken from the individual spectra.

    @param obj: The data object to be scaled
    @type obj: C{SOM.SOM} or C{SOM.SO}

    @param kwargs: A list of keyword arguments that the function accepts:

    @keyword scale: A flag that signals multiplication by the required bin
                    quantity. The default is I{False} (divide).
    @type scale: C{bool}

    @keyword width: A flag that signals that the adjusting quantity is the
                    bin width. The default is I{True}. If I{False}, the bin
                    center is used.
    @type width: C{bool}

    @keyword units: The expected units for this function. The default for
                    this function is I{microsecond}.
    @type units: C{string}

    @return: The object with the individual spectra scaled
    @rtype: C{SOM.SOM} or C{SOM.SO}
    """
    import hlr_utils

    # set up for working through data
    (result, res_descr) = hlr_utils.empty_result(obj)
    o_descr = hlr_utils.get_descr(obj)

    # Setup keyword arguments
    try:
        scale = kwargs["scale"]
    except KeyError:
        scale = False

    try:
        width = kwargs["width"]
    except KeyError:
        width = True

    try:
        units = kwargs["units"]
    except KeyError:
        units = "microsecond"

    # Primary axis for transformation. If a SO is passed, the function will
    # assume the axis for transformation is at the 0 position
    if o_descr == "SOM":
        axis_pos = hlr_utils.one_d_units(obj, units)
    else:
        axis_pos = 0

    result = hlr_utils.copy_som_attr(result, res_descr, obj, o_descr)

    # iterate through the values
    import array_manip
    import utils

    for i in xrange(hlr_utils.get_length(obj)):
        val = hlr_utils.get_value(obj, i, o_descr, "y")
        err2 = hlr_utils.get_err2(obj, i, o_descr, "y")

        axis = hlr_utils.get_value(obj, i, o_descr, "x", axis_pos)
        axis_err2 = hlr_utils.get_err2(obj, i, o_descr, "x", axis_pos)

        map_so = hlr_utils.get_map_so(obj, None, i)

        if width:
            (bin_const, bin_const_err2) = utils.calc_bin_widths(axis,
                                                                axis_err2)
        else:
            (bin_const, bin_const_err2) = utils.calc_bin_centers(axis,
                                                                 axis_err2)

        if scale:
            value = array_manip.mult_ncerr(val, err2,
                                           bin_const, bin_const_err2)
        else:
            value = array_manip.div_ncerr(val, err2,
                                          bin_const, bin_const_err2)

        hlr_utils.result_insert(result, res_descr, value, map_so, "y")

    return result
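# ---------------------------------------------------------------------------
# Illustrative sketch (not part of the original module): converting a
# time-of-flight histogram to counts per microsecond and back. The SOM is
# assumed to carry a TOF axis in microseconds; get_tof_som() is a
# hypothetical placeholder for whatever reader produces it.
# ---------------------------------------------------------------------------
def _example_fix_bin_contents():
    import dr_lib

    tof_som = get_tof_som()                  # hypothetical upstream reader

    # Divide each bin by its width: counts -> counts / microsecond
    density_som = dr_lib.fix_bin_contents(tof_som, units="microsecond")

    # Multiply back by the bin widths to recover the original counts
    counts_som = dr_lib.fix_bin_contents(density_som, scale=True,
                                         units="microsecond")
    return counts_som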
def integrate_axis_py(obj, **kwargs):
    """
    This function takes a spectrum and integrates the given axis. The
    function assumes that the incoming data is in the histogram form.

    @param obj: Spectrum to be integrated
    @type obj: C{SOM.SOM} or C{SOM.SO}

    @param kwargs: A list of keyword arguments that the function accepts:

    @keyword start: Index of the starting bin
    @type start: C{int}

    @keyword end: Index of the ending bin. This index is made inclusive by
                  the function.
    @type end: C{int}

    @keyword axis: This is the axis one wishes to manipulate. If no argument
                   is given the default value is I{y}.
    @type axis: C{string}=<y or x>

    @keyword axis_pos: This is the position of the axis in the axis array.
                       If no argument is given, the default value is I{0}.
    @type axis_pos: C{int}

    @keyword avg: This allows the function to calculate an average over the
                  number of contributing bins. The default value is I{False}.
    @type avg: C{boolean}

    @keyword width: This is a flag to turn on the multiplication of the
                    individual bin contents with the bin's corresponding
                    width.
    @type width: C{boolean}

    @keyword width_pos: This is the position of the x-axis in the axis array
                        from which to calculate the bin widths in support of
                        the width flag. If no argument is given, the default
                        value is I{0}.
    @type width_pos: C{int}

    @return: The integration value and its associated error
    @rtype: C{tuple}

    @raise RuntimeError: A C{SOM} or C{SO} is not given to the function.

    @raise RuntimeError: The width keyword is used with x-axis integration.
    """
    # import the helper functions
    import hlr_utils

    # set up for working through data
    o_descr = hlr_utils.get_descr(obj)

    if o_descr == "number" or o_descr == "list":
        raise RuntimeError("Must provide a SOM or a SO to the function.")
    # Go on
    else:
        pass

    # Check for starting bin
    try:
        start = kwargs["start"]
    except KeyError:
        start = 0

    # Check for ending bin
    try:
        end = kwargs["end"]
        if end != -1:
            end += 1
        else:
            pass
    except KeyError:
        end = -1

    # Check for axis keyword argument
    try:
        axis = kwargs["axis"]
    except KeyError:
        axis = "y"

    # Check for axis_pos keyword argument
    try:
        axis_pos = kwargs["axis_pos"]
    except KeyError:
        axis_pos = 0

    # Check for avg keyword argument
    try:
        avg = kwargs["avg"]
    except KeyError:
        avg = False

    # Check for width keyword argument
    try:
        width = kwargs["width"]
    except KeyError:
        width = False

    # Check for width_pos keyword argument
    try:
        width_pos = kwargs["width_pos"]
    except KeyError:
        width_pos = 0

    integration = float(0)
    integration_error2 = float(0)

    import itertools
    if width:
        import utils

    bad_values = ["nan", "inf", "-inf"]

    for i in xrange(hlr_utils.get_length(obj)):
        counter = 0

        value = hlr_utils.get_value(obj, i, o_descr, axis, axis_pos)
        error = hlr_utils.get_err2(obj, i, o_descr, axis, axis_pos)

        if end == -1:
            value = value[start:]
            error = error[start:]
        else:
            value = value[start:end]
            error = error[start:end]

        if not width:
            for val, err2 in itertools.izip(value, error):
                if str(val) in bad_values or str(err2) in bad_values:
                    continue
                else:
                    integration += val
                    integration_error2 += err2
                    counter += 1
        else:
            if axis == "y":
                x_axis = hlr_utils.get_value(obj, i, o_descr, "x", width_pos)
                x_err2 = hlr_utils.get_err2(obj, i, o_descr, "x", width_pos)
            elif axis == "x":
                raise RuntimeError("Cannot use width flag with x-axis "\
                                   +"integration")

            bin_widths = utils.calc_bin_widths(x_axis, x_err2)

            for val, err2, delta in itertools.izip(value, error,
                                                   bin_widths[0]):
                if str(val) in bad_values or str(err2) in bad_values:
                    continue
                else:
                    integration += (delta * val)
                    integration_error2 += (delta * delta * err2)
                    counter += 1

    if avg:
        return (integration / float(counter),
                integration_error2 / float(counter))
    else:
        return (integration, integration_error2)
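# ---------------------------------------------------------------------------
# Illustrative sketch (not part of the original module): integrating a
# restricted bin range of a spectrum with bin-width weighting, the same call
# pattern dimensionless_mon uses above. The spectrum object is assumed to be
# a SOM or SO with a histogram x-axis; get_spectrum() is hypothetical.
# ---------------------------------------------------------------------------
def _example_integrate_axis_py():
    import dr_lib

    spectrum = get_spectrum()                # hypothetical upstream step

    # Integrate bins 10 through 20 (inclusive), weighting by bin width
    (total, total_err2) = dr_lib.integrate_axis_py(spectrum,
                                                   start=10, end=20,
                                                   width=True)
    return (total, total_err2)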