Example #1
0
def run(config, tim=None):
    """
    This method is where the data reduction process gets done.

    @param config: Object containing the data reduction configuration
                   information.
    @type config: L{hlr_utils.Configure}

    @param tim: (OPTIONAL) Object that will allow the method to perform
                           timing evaluations.
    @type tim: C{sns_time.DiffTime}
    """
    import common_lib
    import dr_lib
    import DST

    if tim is not None:
        tim.getTime(False)
        old_time = tim.getOldTime()

    if config.data is None:
        raise RuntimeError("Need to pass a data filename to the driver "\
                           +"script.")

    # Read in geometry if one is provided
    if config.inst_geom is not None:
        if config.verbose:
            print "Reading in instrument geometry file"

        inst_geom_dst = DST.getInstance("application/x-NxsGeom",
                                        config.inst_geom)
    else:
        inst_geom_dst = None

    # Add so_axis to Configure object
    config.so_axis = "time_of_flight"

    dataset_type = "background"

    # Step 0: Open appropriate data files

    # Data
    if config.verbose:
        print "Reading %s file" % dataset_type

    # The [0] is to get the data SOM and ignore the None background SOM
    dp_som = dr_lib.add_files(config.data,
                              Data_Paths=config.data_paths.toPath(),
                              SO_Axis=config.so_axis,
                              Signal_ROI=config.roi_file,
                              dataset_type=dataset_type,
                              Verbose=config.verbose,
                              Timer=tim)

    if tim is not None:
        tim.getTime(msg="After reading %s " % dataset_type)

    dp_som0 = dr_lib.fix_bin_contents(dp_som)

    del dp_som

    if inst_geom_dst is not None:
        inst_geom_dst.setGeometry(config.data_paths.toPath(), dp_som0)

    # Note: time_zero_offset_det MUST be a tuple
    if config.time_zero_offset_det is not None:
        dp_som0.attr_list["Time_zero_offset_det"] = \
                                   config.time_zero_offset_det.toValErrTuple()

    # Step 2: Convert TOF to wavelength for data
    if config.verbose:
        print "Converting TOF to wavelength"

    if tim is not None:
        tim.getTime(False)

    # Convert detector pixels
    dp_som1 = common_lib.tof_to_wavelength_lin_time_zero(
        dp_som0,
        units="microsecond",
        time_zero_offset=config.time_zero_offset_det.toValErrTuple(),
        inst_param="total")

    if tim is not None:
        tim.getTime(msg="After converting TOF to wavelength ")

    del dp_som0

    if config.verbose:
        print "Cutting spectra"

    if tim is not None:
        tim.getTime(False)

    dp_som2 = dr_lib.cut_spectra(dp_som1, config.lambda_low_cut,
                                 config.lambda_high_cut)

    if tim is not None:
        tim.getTime(msg="After cutting spectra ")

    del dp_som1

    rebin_axis = config.lambda_bins.toNessiList()

    # Put the data on the same axis
    if config.verbose:
        print "Rebinning data onto specified wavelength axis"

    if tim is not None:
        tim.getTime(False)

    dp_som3 = dr_lib.sum_by_rebin_frac(dp_som2, rebin_axis)

    if tim is not None:
        tim.getTime(msg="After rebinning data onto specified wavelength axis ")

    del dp_som2

    data_run_time = dp_som3.attr_list["background-duration"]

    # Calculate the accelerator on time
    if config.verbose:
        print "Calculating accelerator on time"

    acc_on_time = hlr_utils.DrParameter(
        data_run_time.getValue() - config.acc_down_time.getValue(), 0.0,
        "seconds")

    # Get the number of data bins
    num_wave_bins = len(rebin_axis) - 1

    # Calculate the scaled accelerator uptime
    if config.verbose:
        print "Calculating the scaled accelerator uptime"

    if tim is not None:
        tim.getTime(False)

    final_scale = acc_on_time.toValErrTuple()[0] / num_wave_bins

    if tim is not None:
        tim.getTime(msg="After calculating the scaled accelerator uptime ")

    # Create the final background spectrum
    if config.verbose:
        print "Creating the background spectrum"

    if tim is not None:
        tim.getTime(False)

    dp_som4 = common_lib.div_ncerr(dp_som3, (final_scale, 0))
    dp_som4.attr_list["%s-Scaling" % dataset_type] = final_scale

    if tim is not None:
        tim.getTime(msg="After creating background spectrum ")

    del dp_som3

    # Write out the background spectrum
    hlr_utils.write_file(config.output,
                         "text/Spec",
                         dp_som4,
                         verbose=config.verbose,
                         output_ext="bkg",
                         data_ext=config.ext_replacement,
                         replace_path=False,
                         replace_ext=True,
                         message="background spectrum")

    dp_som4.attr_list["config"] = config

    hlr_utils.write_file(config.output,
                         "text/rmd",
                         dp_som4,
                         output_ext="rmd",
                         data_ext=config.ext_replacement,
                         path_replacement=config.path_replacement,
                         verbose=config.verbose,
                         message="metadata")

    if tim is not None:
        tim.setOldTime(old_time)
        tim.getTime(msg="Total Running Time")
Example #2
0
def run(config, tim):
    """
    This method is where the data reduction process gets done.

    @param config: Object containing the data reduction configuration
                   information.
    @type config: L{hlr_utils.Configure}

    @param tim: Object that will allow the method to perform timing
                evaluations.
    @type tim: C{sns_time.DiffTime}
    """
    import DST
    import math

    if config.inst == "REF_M":
        import axis_manip
        import utils

    if tim is not None:
        tim.getTime(False)
        old_time = tim.getOldTime()

    if config.data is None:
        raise RuntimeError("Need to pass a data filename to the driver " + "script.")

    # Read in sample data geometry if one is provided
    if config.data_inst_geom is not None:
        if config.verbose:
            print "Reading in sample data instrument geometry file"

        data_inst_geom_dst = DST.getInstance("application/x-NxsGeom", config.data_inst_geom)
    else:
        data_inst_geom_dst = None

    # Read in normalization data geometry if one is provided
    if config.norm_inst_geom is not None:
        if config.verbose:
            print "Reading in normalization instrument geometry file"

        norm_inst_geom_dst = DST.getInstance("application/x-NxsGeom", config.norm_inst_geom)
    else:
        norm_inst_geom_dst = None

    # Perform Steps 1-6 on sample data
    d_som1 = dr_lib.process_ref_data(
        config.data,
        config,
        config.data_roi_file,
        config.dbkg_roi_file,
        config.no_bkg,
        tof_cuts=config.tof_cuts,
        inst_geom_dst=data_inst_geom_dst,
        timer=tim,
    )

    # Perform Steps 1-6 on normalization data
    if config.norm is not None:
        n_som1 = dr_lib.process_ref_data(
            config.norm,
            config,
            config.norm_roi_file,
            config.nbkg_roi_file,
            config.no_norm_bkg,
            dataset_type="norm",
            tof_cuts=config.tof_cuts,
            inst_geom_dst=norm_inst_geom_dst,
            timer=tim,
        )
    else:
        n_som1 = None

    if config.Q_bins is None and config.scatt_angle is not None:
        import copy

        tof_axis = copy.deepcopy(d_som1[0].axis[0].val)

    # Closing sample data instrument geometry file
    if data_inst_geom_dst is not None:
        data_inst_geom_dst.release_resource()

    # Closing normalization data instrument geometry file
    if norm_inst_geom_dst is not None:
        norm_inst_geom_dst.release_resource()

    # Step 7: Sum all normalization spectra together
    if config.norm is not None:
        n_som2 = dr_lib.sum_all_spectra(n_som1)
    else:
        n_som2 = None

    del n_som1

    # Step 8: Divide data by normalization
    if config.verbose and config.norm is not None:
        print "Scale data by normalization"

    if config.norm is not None:
        d_som2 = common_lib.div_ncerr(d_som1, n_som2, length_one_som=True)
    else:
        d_som2 = d_som1

    if tim is not None and config.norm is not None:
        tim.getTime(msg="After normalizing signal spectra")

    del d_som1, n_som2

    if config.dump_rtof_comb:
        d_som2_1 = dr_lib.sum_all_spectra(d_som2)
        d_som2_2 = dr_lib.data_filter(d_som2_1)
        del d_som2_1

        if config.inst == "REF_M":
            tof_bc = utils.calc_bin_centers(d_som2_2[0].axis[0].val)
            d_som2_2[0].axis[0].val = tof_bc[0]
            d_som2_2.setDataSetType("density")

        hlr_utils.write_file(
            config.output,
            "text/Spec",
            d_som2_2,
            output_ext="crtof",
            verbose=config.verbose,
            data_ext=config.ext_replacement,
            path_replacement=config.path_replacement,
            message="combined R(TOF) information",
        )

        del d_som2_2

    if config.dump_rtof:
        if config.inst == "REF_M":
            d_som2_1 = d_som2
        else:
            d_som2_1 = dr_lib.filter_ref_data(d_som2)

        hlr_utils.write_file(
            config.output,
            "text/Spec",
            d_som2_1,
            output_ext="rtof",
            verbose=config.verbose,
            data_ext=config.ext_replacement,
            path_replacement=config.path_replacement,
            message="R(TOF) information",
        )
        del d_som2_1

    if config.inst == "REF_L":
        # Step 9: Convert TOF to scalar Q
        if config.verbose:
            print "Converting TOF to scalar Q"

        # Check to see if polar angle offset is necessary
        if config.angle_offset is not None:
            # Check on units, offset must be in radians
            p_temp = config.angle_offset.toFullTuple(True)
            if p_temp[2] == "degrees" or p_temp[2] == "degree":
                deg_to_rad = math.pi / 180.0
                p_off_rads = p_temp[0] * deg_to_rad
                p_off_err2_rads = p_temp[1] * deg_to_rad * deg_to_rad
            else:
                p_off_rads = p_temp[0]
                p_off_err2_rads = p_temp[1]

            p_offset = (p_off_rads, p_off_err2_rads)

            d_som2.attr_list["angle_offset"] = config.angle_offset
        else:
            p_offset = None

        if tim is not None:
            tim.getTime(False)

        d_som3 = common_lib.tof_to_scalar_Q(d_som2, units="microsecond", angle_offset=p_offset, lojac=False)

        del d_som2

        if tim is not None:
            tim.getTime(msg="After converting wavelength to scalar Q ")

        if config.dump_rq:
            d_som3_1 = dr_lib.data_filter(d_som3, clean_axis=True)
            hlr_utils.write_file(
                config.output,
                "text/Spec",
                d_som3_1,
                output_ext="rq",
                verbose=config.verbose,
                data_ext=config.ext_replacement,
                path_replacement=config.path_replacement,
                message="pixel R(Q) information",
            )
            del d_som3_1

        if not config.no_filter:
            if config.verbose:
                print "Filtering final data"

            if tim is not None:
                tim.getTime(False)

            d_som4 = dr_lib.data_filter(d_som3)

            if tim is not None:
                tim.getTime(msg="After filtering data")
        else:
            d_som4 = d_som3

        del d_som3
    else:
        d_som4 = d_som2

    # Step 10: Rebin all spectra to final Q axis
    if config.Q_bins is None:
        if config.scatt_angle is None:
            config.Q_bins = dr_lib.create_axis_from_data(d_som4)
            rebin_axis = config.Q_bins.toNessiList()
        else:
            # Get scattering angle and make Q conversion from TOF axis
            # Check on units, scattering angle must be in radians
            sa_temp = config.scatt_angle.toFullTuple(True)
            if sa_temp[2] == "degrees" or sa_temp[2] == "degree":
                deg_to_rad = math.pi / 180.0
                sa_rads = sa_temp[0] * deg_to_rad
                sa_err2_rads = sa_temp[1] * deg_to_rad * deg_to_rad
            else:
                sa_rads = sa_temp[0]
                sa_err2_rads = sa_temp[1]

            sa = (sa_rads, sa_err2_rads)

            pl = d_som4.attr_list.instrument.get_total_path(d_som4[0].id, det_secondary=True)

            import nessi_list

            tof_axis_err2 = nessi_list.NessiList(len(tof_axis))

            rebin_axis = axis_manip.tof_to_scalar_Q(tof_axis, tof_axis_err2, pl[0], pl[1], sa[0], sa[1])[0]

            axis_manip.reverse_array_nc(rebin_axis)
    else:
        rebin_axis = config.Q_bins.toNessiList()

    if config.inst == "REF_L":
        if config.verbose:
            print "Rebinning spectra"

        if tim is not None:
            tim.getTime(False)

        d_som5 = common_lib.rebin_axis_1D_linint(d_som4, rebin_axis)

        if tim is not None:
            tim.getTime(msg="After rebinning spectra")

        del d_som4

        if config.dump_rqr:
            hlr_utils.write_file(
                config.output,
                "text/Spec",
                d_som5,
                output_ext="rqr",
                verbose=config.verbose,
                data_ext=config.ext_replacement,
                path_replacement=config.path_replacement,
                message="pixel R(Q) (after rebinning) " + "information",
            )

        # Step 11: Sum all rebinned spectra
        if config.verbose:
            print "Summing spectra"

        if tim is not None:
            tim.getTime(False)

        d_som6 = dr_lib.sum_all_spectra(d_som5)

        if tim is not None:
            tim.getTime(msg="After summing spectra")

        del d_som5
    else:
        d_som5 = d_som4

    if config.inst == "REF_M":
        d_som5A = dr_lib.sum_all_spectra(d_som5)
        del d_som5
        d_som6 = dr_lib.data_filter(d_som5A)
        del d_som5A
        axis_manip.reverse_array_nc(d_som6[0].y)
        axis_manip.reverse_array_nc(d_som6[0].var_y)

        d_som6.setYLabel("Intensity")
        d_som6.setYUnits("Counts/A-1")
        d_som6.setAllAxisLabels(["scalar wavevector transfer"])
        d_som6.setAllAxisUnits(["1/Angstroms"])

        Q_bc = utils.calc_bin_centers(rebin_axis)
        d_som6[0].axis[0].val = Q_bc[0]
        d_som6.setDataSetType("density")

    hlr_utils.write_file(
        config.output,
        "text/Spec",
        d_som6,
        replace_ext=False,
        replace_path=False,
        verbose=config.verbose,
        message="combined Reflectivity information",
    )

    d_som6.attr_list["config"] = config

    hlr_utils.write_file(
        config.output,
        "text/rmd",
        d_som6,
        output_ext="rmd",
        verbose=config.verbose,
        data_ext=config.ext_replacement,
        path_replacement=config.path_replacement,
        message="metadata",
    )

    if tim is not None:
        tim.setOldTime(old_time)
        tim.getTime(msg="Total Running Time")
Example #3
0
def add_files(filelist, **kwargs):
    """
    This function takes a list of U{NeXus<www.nexusformat.org>} files and
    various keyword arguments and returns a data C{SOM} and a background C{SOM}
    (if requested) that is the sum of all the data from the specified files.
    B{It is assumed that the files contain similar data as only crude
    cross-checks will be made. You have been warned.}

    @param filelist: A list containing the names of the files to sum
    @type filelist: C{list}
    
    @param kwargs: A list of keyword arguments that the function accepts:
    
    @keyword SO_Axis: This is the name of the main axis to read from the NeXus
                      file
    @type SO_Axis: C{string}
    
    @keyword Data_Paths: This contains the data paths and signals for the
                         requested detector banks
    @type Data_Paths: C{tuple} of C{tuple}s
    
    @keyword Signal_ROI: This is the name of a file that contains a list of
                         pixel IDs that will be read from the data file and
                         stored as a signal C{SOM}
    @type Signal_ROI: C{string}

    @keyword Signal_MASK: This is the name of a file that contains a list of
                         pixel IDs that will be read from the data file and
                         stored as a signal C{SOM}
    @type Signal_MASK: C{string}    
    
    @keyword dataset_type: The practical name of the dataset being processed.
                           The default value is I{data}.
    @type dataset_type: C{string}

    @keyword dst_type: The type of C{DST} to be created during file read-in.
                       The default value is I{application/x-NeXus}.
    @type dst_type: C{string}
    
    @keyword Verbose: This is a flag to turn on print statments. The default is
                      I{False}.
    @type Verbose: C{boolean}
    
    @keyword Timer: This is an SNS Timer object used for showing the
                    performance timing in the function.
    @type Timer: C{sns_timing.Timer}


    @return: Signal C{SOM.SOM} and background C{SOM.SOM}
    @rtype: C{tuple}

    
    @raise SystemExit: If any file cannot be read
    @raise RuntimeError: If both a ROI and MASK file are specified
    """
    import sys

    import common_lib
    import DST
    import hlr_utils

    # Parse keywords
    try:
        so_axis = kwargs["SO_Axis"]
    except KeyError:
        so_axis = "time_of_flight"

    try:
        data_paths = kwargs["Data_Paths"]
    except KeyError:
        data_paths = None

    try:
        signal_roi = kwargs["Signal_ROI"]
    except KeyError:
        signal_roi = None

    try:
        signal_mask = kwargs["Signal_MASK"]
    except KeyError:
        signal_mask = None

    try:
        dataset_type = kwargs["dataset_type"]
    except KeyError:
        dataset_type = "data"

    try:
        dst_type = kwargs["dst_type"]
    except KeyError:
        try:
            dst_type = hlr_utils.file_peeker(filelist[0])
        except RuntimeError:
            # Assume it is a NeXus file, since it is not a DR produced file
            dst_type = "application/x-NeXus"

    try:
        verbose = kwargs["Verbose"]
    except KeyError:
        verbose = False

    try:
        timer = kwargs["Timer"]
    except KeyError:
        timer = None

    if signal_roi is not None and signal_mask is not None:
        raise RuntimeError("Cannot specify both ROI and MASK file! Please " + "choose!")

    counter = 0

    for filename in filelist:
        if verbose:
            print "File:", filename

        try:
            if dst_type == "application/x-NeXus":
                data_dst = DST.getInstance(dst_type, filename)
            else:
                resource = open(filename, "r")
                data_dst = DST.getInstance(dst_type, resource)
        except SystemError:
            print "ERROR: Failed to data read file %s" % filename
            sys.exit(-1)

        if verbose:
            print "Reading data file %d" % counter

        if counter == 0:
            if dst_type == "application/x-NeXus":
                d_som1 = data_dst.getSOM(data_paths, so_axis, roi_file=signal_roi, mask_file=signal_mask)
                d_som1.rekeyNxPars(dataset_type)
            else:
                if dst_type != "text/Dave2d":
                    d_som1 = data_dst.getSOM(data_paths, roi_file=signal_roi, mask_file=signal_mask)
                else:
                    d_som1 = data_dst.getSOM(data_paths)

            if verbose:
                len_data = len(d_som1)
                print "# Signal SO:", len_data
                if len_data == 0:
                    print "All data has been filtered. Program exiting."
                    sys.exit(0)

                if dst_type == "application/x-NeXus":
                    print "# TOF:", len(d_som1[0])
                    print "# TOF Axis:", len(d_som1[0].axis[0].val)
                elif dst_type != "text/num-info":
                    print "# Data Size:", len(d_som1[0])
                    print "# X-Axis:", len(d_som1[0].axis[0].val)
                    try:
                        axis_len = len(d_som1[0].axis[1].val)
                        print "# Y-Axis:", axis_len
                    except IndexError:
                        pass

            if timer is not None:
                timer.getTime(msg="After reading data")

        else:
            if dst_type == "application/x-NeXus":
                d_som_t = data_dst.getSOM(data_paths, so_axis, roi_file=signal_roi, mask_file=signal_mask)
                d_som_t.rekeyNxPars(dataset_type)
                add_nxpars_sig = True
            else:
                if dst_type != "text/Dave2d":
                    d_som_t = data_dst.getSOM(data_paths, roi_file=signal_roi, mask_file=signal_mask)
                else:
                    d_som_t = data_dst.getSOM(data_paths)
                add_nxpars_sig = False

            if timer is not None:
                timer.getTime(msg="After reading data")

            d_som1 = common_lib.add_ncerr(d_som_t, d_som1, add_nxpars=add_nxpars_sig)

            if timer is not None:
                timer.getTime(msg="After adding spectra")

            del d_som_t

            if timer is not None:
                timer.getTime(msg="After SOM deletion")

        data_dst.release_resource()
        del data_dst
        counter += 1

        if timer is not None:
            timer.getTime(msg="After resource release and DST deletion")

        if dst_type == "application/x-NeXus":
            som_key_parts = [dataset_type, "filename"]
            som_key = "-".join(som_key_parts)

            d_som1.attr_list[som_key] = filelist
        else:
            # Previously written files already have this structure imposed
            pass

    return d_som1
Example #4
0
def run(config):
    """
    This method is where the data reduction process gets done. It walks both
    detector banks, sums groups of pixels (vertically or horizontally per the
    configuration), integrates each summed spectrum and writes one TOF
    intensity-sum file per bank slice.

    @param config: Object containing the data reduction configuration
                   information.
    @type config: L{hlr_utils.Configure}
    """
    import sys

    import dr_lib
    import DST
    import SOM
    # NOTE(review): hlr_utils.write_file is used below but hlr_utils was
    # missing from this function's local import list; added for consistency
    # with the other local imports. Harmless if it is also imported at module
    # level.
    import hlr_utils

    banks = [("/entry/bank1", 1), ("/entry/bank2", 1)]

    # Detector banks are 64x64 pixels
    max_ids = (64, 64)

    # NOTE(review): the divisions below rely on Python 2 integer (floor)
    # division; pixel_group/sum_tubes are presumably exact divisors of 64
    if config.vertical:
        tag = "v"
        size = max_ids[1]
        reps = max_ids[0] / config.pixel_group
        label = "Integrated pixel"
    else:
        tag = "h"
        size = max_ids[1] / config.pixel_group
        reps = max_ids[0] / config.sum_tubes
        label = "Tube Number"

    try:
        data_dst = DST.getInstance("application/x-NeXus", config.data)
    except SystemError:
        print "ERROR: Failed to data read file %s" % config.data
        sys.exit(-1)

    so_axis = "time_of_flight"

    for path in banks:
        # Bank name (e.g. "bank1") used to build the output filename
        bank = path[0].split('/')[-1]

        for i in range(size):

            tSOM = SOM.SOM()
            tSO = SOM.SO(construct=True)

            counter = 1
            for j in range(reps):

                # Select the pixel ID rectangle for this group
                if config.vertical:
                    starting_id = (i, config.pixel_group * j)
                    ending_id = (i + 1, config.pixel_group * (j + 1))
                else:
                    if config.sum_tubes == 1:
                        x1 = j
                        x2 = j + 1
                    else:
                        x1 = j * config.sum_tubes
                        x2 = (j + 1) * config.sum_tubes

                    starting_id = (x1, config.pixel_group * i)
                    ending_id = (x2, config.pixel_group * (i + 1))

                d_som1 = data_dst.getSOM(path,
                                         so_axis,
                                         start_id=starting_id,
                                         end_id=ending_id)

                d_som2 = dr_lib.sum_all_spectra(d_som1)
                d_som2[0].id = d_som1[0].id

                # Drop the raw SOM before the next read (was "d_som1 = None"
                # followed by "del d_som1"; a single del is equivalent)
                del d_som1

                value = dr_lib.integrate_axis(d_som2)

                if config.verbose:
                    print "Sum", d_som2[0].id, ":", value[0], value[1]

                tSO.axis[0].val.append(counter)
                tSO.y.append(value[0])
                tSO.var_y.append(value[1])
                # The first group's pixel ID labels the whole summed SO
                if counter == 1:
                    tSO.id = d_som2[0].id

                counter += 1

            tSOM.attr_list["filename"] = config.data
            tSOM.setTitle("TOF Pixel Summation")
            tSOM.setDataSetType("density")
            tSOM.setYLabel("Intensity Sum")
            tSOM.setYUnits("counts")
            tSOM.setAxisLabel(0, label)
            tSOM.setAxisUnits(0, "")
            tSOM.append(tSO)

            tag1 = str(i + 1)

            outfile = bank + "_" + tag + "_" + tag1 + ".tof"

            hlr_utils.write_file(outfile,
                                 "text/Spec",
                                 tSOM,
                                 verbose=config.verbose,
                                 message="intensity sum file",
                                 replace_ext=False)

    data_dst.release_resource()
Example #5
0
def run(config, tim=None):
    """
    This method is where the data reduction process gets done.

    Pipeline overview: read the sample data plus any of the optional
    empty-can, normalization, background and direct-scattering background
    datasets; subtract the backgrounds from one another in stages;
    normalize the sample data; convert initial wavelength to initial
    energy; and finally produce (plain and scaled) energy-transfer
    spectra that are written out via hlr_utils.write_file.

    @param config: Object containing the data reduction configuration
                   information.
    @type config: L{hlr_utils.Configure}

    @param tim: (OPTIONAL) Object that will allow the method to perform
                           timing evaluations.
    @type tim: C{sns_time.DiffTime}
    """
    import common_lib
    import dr_lib
    import DST

    # Capture the starting time so the total running time can be reported
    # at the very end of the driver.
    if tim is not None:
        tim.getTime(False)
        old_time = tim.getOldTime()

    if config.data is None:
        raise RuntimeError("Need to pass a data filename to the driver "\
                           +"script.")

    # Read in geometry if one is provided
    if config.inst_geom is not None:
        if config.verbose:
            print "Reading in instrument geometry file"
            
        inst_geom_dst = DST.getInstance("application/x-NxsGeom",
                                        config.inst_geom)
    else:
        inst_geom_dst = None

    # Perform early background subtraction if the hwfix flag is used
    if config.hwfix:
        # NOTE(review): the axis name casing differs between normal and
        # Monte Carlo data -- confirm this matches the respective file
        # formats.
        if not config.mc:
            so_axis = "time_of_flight"
        else:
            so_axis = "Time_of_Flight"
        
        bkg_som0 = dr_lib.add_files(config.back,
                                    Data_Paths=config.data_paths.toPath(),
                                    SO_Axis=so_axis,
                                    Signal_ROI=config.roi_file,
                                    dataset_type="background",
                                    Verbose=config.verbose, Timer=tim)

        bkg_som = dr_lib.fix_bin_contents(bkg_som0)
        del bkg_som0
    else:
        bkg_som = None

    # Perform Steps 1-15 on sample data
    d_som1 = dr_lib.process_igs_data(config.data, config, timer=tim,
                                     inst_geom_dst=inst_geom_dst,
                                     tib_const=config.tib_data_const,
                                     bkg_som=bkg_som)

    # Perform Steps 1-15 on empty can data
    if config.ecan is not None:
        e_som1 = dr_lib.process_igs_data(config.ecan, config, timer=tim,
                                         inst_geom_dst=inst_geom_dst,
                                         dataset_type="empty_can",
                                         tib_const=config.tib_ecan_const,
                                         bkg_som=bkg_som)
    else:
        e_som1 = None

    # Perform Steps 1-15 on normalization data            
    if config.norm is not None:
        n_som1 = dr_lib.process_igs_data(config.norm, config, timer=tim,
                                         inst_geom_dst=inst_geom_dst,
                                         dataset_type="normalization",
                                         tib_const=config.tib_norm_const,
                                         bkg_som=bkg_som)
    else:
        n_som1 = None

    # Perform Steps 1-15 on background data.  When hwfix was used, the
    # background has already been folded in via bkg_som above, so the
    # separate background processing is skipped.
    if config.back is not None and not config.hwfix:
        b_som1 = dr_lib.process_igs_data(config.back, config, timer=tim,
                                         inst_geom_dst=inst_geom_dst,
                                         dataset_type="background",
                                         tib_const=config.tib_back_const)
    else:
        b_som1 = None

    # Perform Step 1-15 on direct scattering background data
    if config.dsback is not None:
        ds_som1 = dr_lib.process_igs_data(config.dsback, config, timer=tim,
                                          inst_geom_dst=inst_geom_dst,
                                          tib_const=config.tib_dsback_const,
                                          dataset_type="dsbackground",
                                          bkg_som=bkg_som)

        # Note: time_zero_slope MUST be a tuple
        if config.time_zero_slope is not None:
            ds_som1.attr_list["Time_zero_slope"] = \
                                      config.time_zero_slope.toValErrTuple()

        # Note: time_zero_offset MUST be a tuple
        if config.time_zero_offset is not None:
            ds_som1.attr_list["Time_zero_offset"] = \
                                      config.time_zero_offset.toValErrTuple()
        
        # Step 16: Linearly interpolate TOF elastic range in direct scattering
        #          background data

        # First convert TOF elastic range to appropriate pixel initial
        # wavelengths
        if config.verbose:
            print "Determining initial wavelength range for elastic line"

        if tim is not None:
            tim.getTime(False)
        
        if config.tof_elastic is None:
            # Units are in microseconds
            tof_elastic_range = (140300, 141300)
        else:
            tof_elastic_range = config.tof_elastic
        
        # Convert both TOF endpoints (with zero error) to per-pixel
        # initial wavelengths.
        ctof_elastic_low = dr_lib.convert_single_to_list(\
               "tof_to_initial_wavelength_igs_lin_time_zero",
               (tof_elastic_range[0], 0.0),
               ds_som1)
        
        ctof_elastic_high = dr_lib.convert_single_to_list(\
               "tof_to_initial_wavelength_igs_lin_time_zero",
               (tof_elastic_range[1], 0.0),
               ds_som1)
        
        # Pair the low/high wavelength values pixel-by-pixel.
        ctof_elastic_range = [(ctof_elastic_low[i][0], ctof_elastic_high[i][0])
                              for i in xrange(len(ctof_elastic_low))]

        if tim is not None:
            tim.getTime(msg="After calculating initial wavelength range for "\
                        +"elastic line ")

        del ctof_elastic_low, ctof_elastic_high

        # Now interpolate spectra between TOF elastic range (converted to
        # initial wavelength)
        if config.verbose:
            print "Linearly interpolating direct scattering spectra"

        if tim is not None:
            tim.getTime(False)
            
        ds_som2 = dr_lib.lin_interpolate_spectra(ds_som1, ctof_elastic_range)

        if tim is not None:
            tim.getTime(msg="After linearly interpolating direct scattering "\
                        +"spectra ")

        if config.dump_dslin:
            ds_som2_1 = dr_lib.sum_all_spectra(ds_som2,\
                                  rebin_axis=config.lambda_bins.toNessiList())

            hlr_utils.write_file(config.output, "text/Spec", ds_som2_1,
                                 output_ext="lin",
                                 data_ext=config.ext_replacement,    
                                 path_replacement=config.path_replacement,
                                 verbose=config.verbose,
                                 message="dsbackground linear interpolation")
            del ds_som2_1
        
        del ds_som1
    else:
        ds_som2 = None

    if inst_geom_dst is not None:
        inst_geom_dst.release_resource()
        
    # Steps 17-18: Subtract background spectrum from sample spectrum.
    # A direct scattering background, when present, supersedes the
    # regular background for this subtraction.
    if config.dsback is None:
        back_som = b_som1
        bkg_type = "background"
    else:
        back_som = ds_som2
        bkg_type = "dsbackground"
    d_som2 = dr_lib.subtract_bkg_from_data(d_som1, back_som,
                                           verbose=config.verbose,
                                           timer=tim,
                                           dataset1="data",
                                           dataset2=bkg_type,
                                           scale=config.scale_bs)

    if config.dsback is not None:
        del ds_som2 

    # Step 19: Zero region outside TOF elastic for background for empty can
    if config.dsback is None:
        bcs_som = b_som1
        cs_som = e_som1
    else:
        # ctof_elastic_range is only bound in the dsback branch above;
        # this branch runs under the same condition, so it is in scope.
        if config.verbose and b_som1 is not None:
            print "Zeroing background spectra"

        if tim is not None and b_som1 is not None:
            tim.getTime(False)
            
        # NOTE(review): zero_spectra is called even when b_som1 is None
        # although the surrounding prints/timers guard on it -- confirm
        # that dr_lib.zero_spectra tolerates a None input.
        bcs_som = dr_lib.zero_spectra(b_som1, ctof_elastic_range)

        if tim is not None and b_som1 is not None:
            tim.getTime(msg="After zeroing background spectra")


        if config.verbose and e_som1 is not None:
            print "Zeroing empty can spectra"

        if tim is not None and e_som1 is not None:
            tim.getTime(False)
            
        # NOTE(review): same concern as above for e_som1 being None.
        cs_som = dr_lib.zero_spectra(e_som1, ctof_elastic_range)

        if tim is not None and e_som1 is not None:
            tim.getTime(msg="After zeroing empty can spectra")
            
        del ctof_elastic_range

    # Steps 20-21: Subtract background spectrum from empty can spectrum    
    e_som2 = dr_lib.subtract_bkg_from_data(cs_som, bcs_som,
                                           verbose=config.verbose,
                                           timer=tim,
                                           dataset1="empty_can",
                                           dataset2="background",
                                           scale=config.scale_bcs)

    # Steps 22-23: Subtract background spectrum from empty can spectrum for
    #              normalization
    e_som3 = dr_lib.subtract_bkg_from_data(e_som1, b_som1,
                                           verbose=config.verbose,
                                           timer=tim,
                                           dataset1="empty_can",
                                           dataset2="background",
                                           scale=config.scale_bcn)

    # Steps 24-25: Subtract background spectrum from normalization spectrum
    n_som2 = dr_lib.subtract_bkg_from_data(n_som1, b_som1,
                                           verbose=config.verbose,
                                           timer=tim,
                                           dataset1="normalization",
                                           dataset2="background",
                                           scale=config.scale_bn)

    # Free the intermediate SOMs that are no longer needed to keep the
    # memory footprint down.
    del b_som1, e_som1, bcs_som, cs_som

    # Steps 26-27: Subtract empty can spectrum from sample spectrum    
    d_som3 = dr_lib.subtract_bkg_from_data(d_som2, e_som2,
                                           verbose=config.verbose,
                                           timer=tim,
                                           dataset1="data",
                                           dataset2="empty_can",
                                           scale=config.scale_cs)

    del d_som2, e_som2
    
    # Steps 28-29: Subtract empty can spectrum from normalization spectrum
    n_som3 = dr_lib.subtract_bkg_from_data(n_som2, e_som3,
                                           verbose=config.verbose,
                                           timer=tim,
                                           dataset1="normalization",
                                           dataset2="empty_can",
                                           scale=config.scale_cn)    

    del n_som2, e_som3

    # Step 30-32: Integrate normalization spectra
    if config.verbose and n_som3 is not None:
        print "Integrating normalization spectra"

    # norm=True requests values suitable for use as normalization factors.
    norm_int = dr_lib.integrate_spectra(n_som3, start=config.norm_start,
                                        end=config.norm_end, norm=True)

    del n_som3
        
    # Step 33: Normalize data by integrated values
    if config.verbose and norm_int is not None:
        print "Normalizing data by normalization data"

    if norm_int is not None:
        d_som4 = common_lib.div_ncerr(d_som3, norm_int)
    else:
        # No normalization data: pass the sample data through unchanged.
        d_som4 = d_som3

    if norm_int is not None:
        if tim is not None:
            tim.getTime(msg="After normalizing data ")

    del d_som3, norm_int

    # Step 35: Convert initial wavelength to E_initial
    if config.verbose:
        print "Converting initial wavelength to E_initial"
        
    if tim is not None:
        tim.getTime(False)

    # NOTE: the d_som5 name is deliberately skipped in this numbering.
    d_som6 = common_lib.wavelength_to_energy(d_som4)
        
    if tim is not None:
        tim.getTime(msg="After converting initial wavelength to E_initial ")

    if config.dump_initial_energy:
        hlr_utils.write_file(config.output, "text/Spec", d_som6,
                             output_ext="ixl",
                             data_ext=config.ext_replacement,
                             path_replacement=config.path_replacement,
                             verbose=config.verbose,
                             message="pixel initial energy information")
            
    del d_som4

    # Steps 36-37: Calculate energy transfer
    if config.verbose:
        print "Calculating energy transfer"

    if tim is not None:
        tim.getTime(False)

    d_som7 = dr_lib.igs_energy_transfer(d_som6)

    if tim is not None:
        tim.getTime(msg="After calculating energy transfer ")
        
    if config.dump_energy:
        hlr_utils.write_file(config.output, "text/Spec", d_som7,
                             output_ext="exl",
                             data_ext=config.ext_replacement,
                             path_replacement=config.path_replacement,
                             verbose=config.verbose,
                             message="pixel energy transfer information")

    # Write 3-column ASCII file for E_t
    d_som7_1 = dr_lib.sum_all_spectra(d_som7,
                                      rebin_axis=config.E_bins.toNessiList())
    hlr_utils.write_file(config.output, "text/Spec", d_som7_1,
                         output_ext="etr",
                         data_ext=config.ext_replacement,
                         path_replacement=config.path_replacement,
                         verbose=config.verbose,
                         message="combined energy transfer information") 
    
    del d_som7_1

    # Steps 34,36-37: Calculate scaled energy transfer
    if config.verbose:
        print "Calculating scaled energy transfer"
        
    # NOTE(review): no tim.getTime(False) restart precedes this call, so
    # the timing message below includes the preceding file writes --
    # confirm whether that is intended.
    d_som9 = dr_lib.igs_energy_transfer(d_som6, scale=True)
    
    if tim is not None:
        tim.getTime(msg="After calculating scaled energy transfer ")

    if config.dump_energy:
        hlr_utils.write_file(config.output, "text/Spec", d_som9,
                             output_ext="sexl",
                             data_ext=config.ext_replacement,    
                             path_replacement=config.path_replacement,
                             verbose=config.verbose,
                             message="pixel scaled energy transfer "\
                             +"information")

    # Write 3-column ASCII file for scaled E_t
    d_som9_1 = dr_lib.sum_all_spectra(d_som9,
                                      rebin_axis=config.E_bins.toNessiList())
    hlr_utils.write_file(config.output, "text/Spec", d_som9_1,
                         output_ext="setr",
                         data_ext=config.ext_replacement,            
                         path_replacement=config.path_replacement,
                         verbose=config.verbose,
                         message="combined scaled energy transfer "\
                         +"information") 
    
    del d_som9_1
    
    del d_som6, d_som7
        
    # Attach the full configuration so it travels with the metadata file.
    d_som9.attr_list["config"] = config
    
    hlr_utils.write_file(config.output, "text/rmd", d_som9,
                         output_ext="rmd",
                         data_ext=config.ext_replacement,         
                         path_replacement=config.path_replacement,
                         verbose=config.verbose,
                         message="metadata")
    
    # Restore the start time and report the total elapsed time.
    if tim is not None:
        tim.setOldTime(old_time)
        tim.getTime(msg="Total Running Time")
Example #6
0
# for any purpose and without fee.
#
# This material was prepared as an account of work sponsored by an agency of
# the United States Government.  Neither the United States Government nor the
# United States Department of Energy, nor any of their employees, makes any
# warranty, express or implied, or assumes any legal liability or
# responsibility for the accuracy, completeness, or usefulness of any
# information, apparatus, product, or process disclosed, or represents that
# its use would not infringe privately owned rights.
#

# $Id$

import sys
import DST

filename = sys.argv[1]

ifile = open(filename, "r")

nif = DST.NumInfoDST(ifile)

som = nif.getSOM()

nif.release_resource()

print som.attr_list
print "Y Label:", som.getYLabel()
print "Y Units:", som.getYUnits()
print som
Example #7
0
#

# $Id$

import DST
from SOM import SOM
from SOM import SO

# Example script: build a small two-spectrum SOM and write it out in
# GSAS format via the DST GsasDST writer.

filename_SOM1 = "stuff1.dat"

SOM1 = SOM()
SOM1.attr_list["filename"] = filename_SOM1
SOM1.setTitle("This is a test")

# Create two spectra with 10 histogram bins each (11 bin boundaries).
for i in range(2):
    SO1 = SO()
    # Set the spectrum ID once, instead of reassigning it on every
    # inner-loop iteration as the original did.
    SO1.id = i
    for j in range(10):
        SO1.axis[0].val.append(j + 1)
        SO1.y.append(1000 + j + (20 * j))
        SO1.var_y.append(100 + j)
    # Close the histogram axis with the final bin boundary.
    SO1.axis[0].val.append(11)

    SOM1.append(SO1)

# Renamed from 'file' to avoid shadowing the Python builtin.
out_file = open(filename_SOM1, "w")

gsas = DST.GsasDST(out_file)
gsas.writeSOM(SOM1)
# NOTE(review): release_resource() presumably closes the underlying file
# handle; confirm, otherwise an explicit out_file.close() is needed here.
gsas.release_resource()
def run(config, tim=None):
    """
    This method is where the data reduction process gets done.

    Pipeline overview: read the sample data plus any of the optional
    empty-can, normalization, background and direct-scattering background
    datasets; subtract the backgrounds from one another in stages;
    normalize the sample data (unless pre-normalized data is supplied);
    and finally create either an S(Q,E) or an S(-cos(polar),E) 2D
    spectrum, which is written out or returned depending on the calling
    context.

    @param config: Object containing the data reduction configuration
                   information.
    @type config: L{hlr_utils.Configure}

    @param tim: (OPTIONAL) Object that will allow the method to perform
                           timing evaluations.
    @type tim: C{sns_time.DiffTime}
    """
    import common_lib
    import dr_lib
    import DST

    # Capture the starting time so the total running time can be reported
    # at the end of the driver.
    if tim is not None:
        tim.getTime(False)
        old_time = tim.getOldTime()

    if config.data is None:
        raise RuntimeError("Need to pass a data filename to the driver "\
                           +"script.")

    # Read in geometry if one is provided
    if config.inst_geom is not None:
        if config.verbose:
            print "Reading in instrument geometry file"
            
        inst_geom_dst = DST.getInstance("application/x-NxsGeom",
                                        config.inst_geom)
    else:
        inst_geom_dst = None

    # Perform early background subtraction if the hwfix flag is used
    if config.hwfix:
        # NOTE(review): the axis name casing differs between normal and
        # Monte Carlo data -- confirm this matches the respective file
        # formats.
        if not config.mc:
            so_axis = "time_of_flight"
        else:
            so_axis = "Time_of_Flight"
        
        bkg_som0 = dr_lib.add_files(config.back,
                                    Data_Paths=config.data_paths.toPath(),
                                    SO_Axis=so_axis,
                                    Signal_ROI=config.roi_file,
                                    dataset_type="background",
                                    Verbose=config.verbose, Timer=tim)

        bkg_som = dr_lib.fix_bin_contents(bkg_som0)
        del bkg_som0
    else:
        bkg_som = None

    # Perform Steps 1-15 on sample data
    d_som1 = dr_lib.process_igs_data(config.data, config, timer=tim,
                                     inst_geom_dst=inst_geom_dst,
                                     tib_const=config.tib_data_const,
                                     bkg_som=bkg_som)

    # Perform Steps 1-15 on empty can data
    if config.ecan is not None:
        e_som1 = dr_lib.process_igs_data(config.ecan, config, timer=tim,
                                         inst_geom_dst=inst_geom_dst,
                                         dataset_type="empty_can",
                                         tib_const=config.tib_ecan_const,
                                         bkg_som=bkg_som)
    else:
        e_som1 = None

    # Perform Steps 1-15 on normalization data            
    if config.norm is not None:
        n_som1 = dr_lib.process_igs_data(config.norm, config, timer=tim,
                                         inst_geom_dst=inst_geom_dst,
                                         dataset_type="normalization",
                                         tib_const=config.tib_norm_const,
                                         bkg_som=bkg_som)
    else:
        n_som1 = None

    # Perform Steps 1-15 on background data
    if config.back is not None:
        b_som1 = dr_lib.process_igs_data(config.back, config, timer=tim,
                                         inst_geom_dst=inst_geom_dst,
                                         dataset_type="background",
                                         tib_const=config.tib_back_const,
                                         bkg_som=bkg_som)
    else:
        b_som1 = None

    # Perform Step 1-15 on direct scattering background data
    if config.dsback is not None:
        ds_som1 = dr_lib.process_igs_data(config.dsback, config, timer=tim,
                                          inst_geom_dst=inst_geom_dst,
                                          tib_const=config.tib_dsback_const,
                                          dataset_type="dsbackground",
                                          bkg_som=bkg_som)

        # Note: time_zero_slope MUST be a tuple
        if config.time_zero_slope is not None:
            ds_som1.attr_list["Time_zero_slope"] = \
                                      config.time_zero_slope.toValErrTuple()

        # Note: time_zero_offset MUST be a tuple
        if config.time_zero_offset is not None:
            ds_som1.attr_list["Time_zero_offset"] = \
                                      config.time_zero_offset.toValErrTuple()
        
        # Step 16: Linearly interpolate TOF elastic range in direct scattering
        #          background data

        # First convert TOF elastic range to appropriate pixel initial
        # wavelengths
        if config.verbose:
            print "Determining initial wavelength range for elastic line"

        if tim is not None:
            tim.getTime(False)
        
        if config.tof_elastic is None:
            # Units are in microseconds
            tof_elastic_range = (140300, 141300)
        else:
            tof_elastic_range = config.tof_elastic
        
        # Convert both TOF endpoints (with zero error) to per-pixel
        # initial wavelengths.
        ctof_elastic_low = dr_lib.convert_single_to_list(\
               "tof_to_initial_wavelength_igs_lin_time_zero",
               (tof_elastic_range[0], 0.0),
               ds_som1)
        
        ctof_elastic_high = dr_lib.convert_single_to_list(\
               "tof_to_initial_wavelength_igs_lin_time_zero",
               (tof_elastic_range[1], 0.0),
               ds_som1)
        
        # Pair the low/high wavelength values pixel-by-pixel.
        ctof_elastic_range = [(ctof_elastic_low[i][0], ctof_elastic_high[i][0])
                              for i in xrange(len(ctof_elastic_low))]

        if tim is not None:
            tim.getTime(msg="After calculating initial wavelength range for "\
                        +"elastic line ")

        del ctof_elastic_low, ctof_elastic_high

        # In split mode, restrict interpolation to each pixel's actual
        # wavelength coverage taken from the sample data axes.
        if config.split:
            lambda_filter = [(d_som1[i].axis[0].val[0],
                              d_som1[i].axis[0].val[-1])
                             for i in xrange(len(d_som1))]
        else:
            lambda_filter = None

        # Now interpolate spectra between TOF elastic range (converted to
        # initial wavelength)
        if config.verbose:
            print "Linearly interpolating direct scattering spectra"

        if tim is not None:
            tim.getTime(False)
            
        ds_som2 = dr_lib.lin_interpolate_spectra(ds_som1, ctof_elastic_range,
                                                 filter_axis=lambda_filter)

        if tim is not None:
            tim.getTime(msg="After linearly interpolating direct scattering "\
                        +"spectra ")

        if config.dump_dslin:
            ds_som2_1 = dr_lib.sum_all_spectra(ds_som2,\
                                  rebin_axis=config.lambda_bins.toNessiList())

            hlr_utils.write_file(config.output, "text/Spec", ds_som2_1,
                                 output_ext="lin",
                                 data_ext=config.ext_replacement,    
                                 path_replacement=config.path_replacement,
                                 verbose=config.verbose,
                                 message="dsbackground linear interpolation")
            del ds_som2_1
        
        del ds_som1
    else:
        ds_som2 = None

    if inst_geom_dst is not None:
        inst_geom_dst.release_resource()

    # Steps 17-18: Subtract background spectrum from sample spectrum.
    # A direct scattering background, when present, supersedes the
    # regular background for this subtraction.
    if config.dsback is None:
        back_som = b_som1
        bkg_type = "background"
    else:
        back_som = ds_som2
        bkg_type = "dsbackground"
    d_som2 = dr_lib.subtract_bkg_from_data(d_som1, back_som,
                                           verbose=config.verbose,
                                           timer=tim,
                                           dataset1="data",
                                           dataset2=bkg_type,
                                           scale=config.scale_bs)

    if config.dsback is not None:
        del ds_som2 

    # Step 19: Zero region outside TOF elastic for background for empty can
    if config.dsback is None:
        bcs_som = b_som1
        cs_som = e_som1
    else:
        # ctof_elastic_range is only bound in the dsback branch above;
        # this branch runs under the same condition, so it is in scope.
        if config.verbose and b_som1 is not None:
            print "Zeroing background spectra"

        if tim is not None and b_som1 is not None:
            tim.getTime(False)
            
        # NOTE(review): zero_spectra is called even when b_som1 is None
        # although the surrounding prints/timers guard on it -- confirm
        # that dr_lib.zero_spectra tolerates a None input.
        bcs_som = dr_lib.zero_spectra(b_som1, ctof_elastic_range)

        if tim is not None and b_som1 is not None:
            tim.getTime(msg="After zeroing background spectra")


        if config.verbose and e_som1 is not None:
            print "Zeroing empty can spectra"

        if tim is not None and e_som1 is not None:
            tim.getTime(False)
            
        # NOTE(review): same concern as above for e_som1 being None.
        cs_som = dr_lib.zero_spectra(e_som1, ctof_elastic_range)

        if tim is not None and e_som1 is not None:
            tim.getTime(msg="After zeroing empty can spectra")
            
        del ctof_elastic_range

    # Steps 20-21: Subtract background spectrum from empty can spectrum    
    e_som2 = dr_lib.subtract_bkg_from_data(cs_som, bcs_som,
                                           verbose=config.verbose,
                                           timer=tim,
                                           dataset1="data-empty_can",
                                           dataset2="background",
                                           scale=config.scale_bcs)

    # Steps 22-23: Subtract background spectrum from empty can spectrum for
    #              normalization

    # Backwards-compatibility shim: older Configure objects may not carry
    # the pre_norm flag, so default it to False when absent.
    try:
        config.pre_norm
    except AttributeError:
        config.pre_norm = False

    if not config.pre_norm:
        e_som3 = dr_lib.subtract_bkg_from_data(e_som1, b_som1,
                                               verbose=config.verbose,
                                               timer=tim,
                                               dataset1="norm-empty_can",
                                               dataset2="background",
                                               scale=config.scale_bcn)
    else:
        e_som3 = None

    # Steps 24-25: Subtract background spectrum from normalization spectrum
    if not config.pre_norm:
        n_som2 = dr_lib.subtract_bkg_from_data(n_som1, b_som1,
                                               verbose=config.verbose,
                                               timer=tim,
                                               dataset1="normalization",
                                               dataset2="background",
                                               scale=config.scale_bn)
    else:
        # Pre-normalized data: pass the normalization spectra through.
        n_som2 = n_som1

    # Free the intermediate SOMs that are no longer needed to keep the
    # memory footprint down.
    del b_som1, e_som1, bcs_som, cs_som

    # Steps 26-27: Subtract empty can spectrum from sample spectrum    
    d_som3 = dr_lib.subtract_bkg_from_data(d_som2, e_som2,
                                           verbose=config.verbose,
                                           timer=tim,
                                           dataset1="data",
                                           dataset2="empty_can",
                                           scale=config.scale_cs)

    del d_som2, e_som2
    
    # Steps 28-29: Subtract empty can spectrum from normalization spectrum
    if not config.pre_norm:
        n_som3 = dr_lib.subtract_bkg_from_data(n_som2, e_som3,
                                               verbose=config.verbose,
                                               timer=tim,
                                               dataset1="normalization",
                                               dataset2="empty_can",
                                               scale=config.scale_cn)
    else:
        n_som3 = n_som2

    del n_som2, e_som3

    # Step 30-31: Integrate normalization spectra
    if config.verbose and n_som3 is not None and not config.pre_norm:
        print "Integrating normalization spectra"

    if not config.pre_norm:
        # norm=True requests values suitable for normalization factors.
        norm_int = dr_lib.integrate_spectra(n_som3, start=config.norm_start,
                                            end=config.norm_end, norm=True)
    else:
        # Pre-normalized: the supplied spectra already act as the factors.
        norm_int = n_som3

    del n_som3
        
    # Step 32: Normalize data by integrated values
    if config.verbose and norm_int is not None:
        print "Normalizing data by normalization data"

    if norm_int is not None:
        d_som4 = common_lib.div_ncerr(d_som3, norm_int)
    else:
        # No normalization data: pass the sample data through unchanged.
        d_som4 = d_som3

    if norm_int is not None:
        if tim is not None:
            tim.getTime(msg="After normalizing data ")

    del d_som3, norm_int

    if config.dump_norm:
        if tim is not None:
            tim.getTime(False)

        hlr_utils.write_file(config.output, "text/Spec", d_som4,
                             output_ext="wvn",
                             data_ext=config.ext_replacement,    
                             path_replacement=config.path_replacement,
                             verbose=config.verbose,
                             message="wavelength (vanadium norm) information")

        if tim is not None:
            tim.getTime(msg="After writing wavelength (vanadium norm) info ")

    # Steps 33 to end: Creating S(Q,E)
    if config.Q_bins is not None:
        if config.verbose:
            print "Creating 2D spectrum"

        if tim is not None:
            tim.getTime(False)

        d_som5 = dr_lib.create_E_vs_Q_igs(d_som4,
                                          config.E_bins.toNessiList(),
                                          config.Q_bins.toNessiList(),
                                          so_id="Full Detector",
                                          y_label="counts",
                                          y_units="counts / (ueV * A^-1)",
                                          x_labels=["Q transfer",
                                                    "energy transfer"],
                                          x_units=["1/Angstroms","ueV"],
                                          split=config.split,
                                          Q_filter=False,
                                          configure=config)
        if tim is not None:
            tim.getTime(msg="After creation of final spectrum ")

        del d_som4
        
    # Steps 33 to 36: Create S(-cos(polar), E)
    elif config.ncospol_bins is not None:
        if config.verbose:
            print "Convert wavelength to energy transfer"

        if tim is not None:
            tim.getTime(False)

        d_som4a = dr_lib.energy_transfer(d_som4, "IGS", "Wavelength_final",
                                         sa_norm=True, scale=True,
                                         change_units=True)

        if tim is not None:
            tim.getTime(msg="After wavelength to energy transfer conversion ")

        del d_som4

        if config.verbose:
            print "Creating 2D spectrum"

        if tim is not None:
            tim.getTime(False)

        d_som5 = dr_lib.create_param_vs_Y(d_som4a, "polar",
                                        "negcos_param_array",
                                        config.ncospol_bins.toNessiList(),
                                        rebin_axis=config.E_bins.toNessiList(),
                                        y_label="counts",
                                        y_units="counts / ueV",
                                        x_labels=["-cos(polar)",
                                                  "Energy Transfer"], 
                                        x_units=["", "ueV"])

        if tim is not None:
            tim.getTime(msg="After creation of final spectrum ")        
    
    # If rescaling factor present, rescale the data
    # NOTE(review): d_som5 is only bound in the Q_bins/ncospol_bins
    # branches above; if both are None this raises NameError -- confirm
    # callers always provide one of the two binnings.
    if config.rescale_final is not None and not config.split:
        d_som6 = common_lib.mult_ncerr(d_som5, (config.rescale_final, 0.0))
    else:
        d_som6 = d_som5

    # old_time is only set when tim is provided; keep __write_output's
    # signature satisfied in the untimed case.
    if tim is None:
        old_time = None

    # When invoked outside the amorphous_reduction_sqe module the output
    # is always written; inside it, writing is controlled by
    # config.create_output, otherwise the final SOM is returned.
    if not __name__ == "amorphous_reduction_sqe":
        del d_som5
        __write_output(d_som6, config, tim, old_time)
    else:
        if config.create_output:
            del d_som5
            __write_output(d_som6, config, tim, old_time)
        else:
            return d_som6
Example #9
0
# its use would not infringe privately owned rights.
#

# $Id$

import DST
from SOM import SOM
from SOM import SO
from time import localtime, strftime, time

# Output filename, stored below as SOM metadata.
filename_SOM1 = "stuff1.dat"

# Build a 2D S(Q, E) spectrum container and populate its file-level metadata.
SOM1 = SOM()
SOM1.attr_list["filename"] = filename_SOM1
SOM1.attr_list["epoch"] = time()
SOM1.attr_list["timestamp"] = DST.make_ISO8601(SOM1.attr_list["epoch"])
SOM1.attr_list["username"] = "******"
SOM1.setAllAxisLabels(["Q", "E"])
SOM1.setAllAxisUnits(["A-1", "meV"])
SOM1.setYLabel("Intensity")
# BUGFIX: removed the unbalanced trailing parenthesis from the units label
# (was "Counts/(meV A-1))").
SOM1.setYUnits("Counts/(meV A-1)")

# Create one 2D spectrum object: the two axes hold 5 and 10 bin boundaries,
# i.e. a 4 x 9 histogram.
SO1 = SO(2)
SO1.id = 0
SO1.axis[0].val.extend(range(5))
SO1.axis[1].val.extend(range(10))

# Fill counts and variances with a simple ramp matching the histogram size.
y_len = (len(SO1.axis[0].val)-1) * (len(SO1.axis[1].val)-1)
y = range(y_len)
SO1.y.extend(y)
SO1.var_y.extend(y)
Example #10
0
def write_file(filename, dst_type, data, **kwargs):
    """
    This function performs the steps necessary to write an output file. One
    can pass a data filename or an output filename. If a data filename is
    passed, the data file extension, output file extension and the replace
    keyword must be passed. The expected data object to write to the file is
    a C{SOM}. B{NOTE}: Extra keyword arguments can be passed onto the C{DST}
    instance via calling them in the kwargs list. Those arguments will not be
    processed by this function, but just pass them on.

    @param filename: The name of the data file from which the output is
                     generated or the name of an output file
    @type filename: C{string}
    
    @param dst_type: The MIME type of the output formatter
    @type dst_type: C{string}
    
    @param data: Object that contains the output to be written to file
    @type data: C{SOM.SOM}
    
    @param kwargs: A list of keyword arguments that the function accepts:
    
    @keyword message: This is part of the message that will be printed to
                      STDOUT if verbose keyword is set to True. The default
                      message is \"output file\"
    @type message: C{string}
    
    @keyword data_ext: This is the extension on the data file. This is used in
                       conjunction with output_ext and replace to convert the
                       data filename into an output filename. The default
                       value is \"nxs\".
    @type data_ext: C{string}
    
    @keyword output_ext: This is the extension to be used for the output file.
                         The default value is \"txt\".
    @type output_ext: C{string}
    
    @keyword verbose: This determines whether or not the print statement is
    executed. The default value is I{False}.
    @type verbose: C{boolean}
    
    @keyword replace_ext: This determines whether or not the extension on the
                          incoming filename is replaced with output_ext. The
                          default behavior is I{True} (replace extension)
    @type replace_ext: C{boolean}
    
    @keyword replace_path: This determines whether or not the directory path on
                           the incoming filename is replaced with the
                           directory where the driver is running. The default
                           behavior is I{True} (replace path)
    @type replace_path: C{boolean}
    
    @keyword path_replacement: This is a directory path that will be prepended
                               to the output filename. The default value is
                               C{None} and will cause the working directory to
                               be the prepended path.
    @type path_replacement: C{string}
    
    @keyword extra_tag: This is a tag that will be inserted into the file name
                        just before the file extension.
    @type extra_tag: C{string}

    @keyword getsom_kwargs: This is a collection of keyword arguments that
                            are to be passed to the writeSOM function call.
    @type getsom_kwargs: C{dict}
    """

    import os

    import DST
    import hlr_utils

    try:
        message = kwargs["message"]
    except KeyError:
        message = "output file"

    try:
        data_ext = kwargs["data_ext"]
    except KeyError:
        data_ext = "nxs"

    try:
        output_ext = kwargs["output_ext"]
    except KeyError:
        output_ext = "txt"

    try:
        verbose = kwargs["verbose"]
    except KeyError:
        verbose = False

    try:
        replace_path = kwargs["replace_path"]
    except KeyError:
        replace_path = True

    try:
        path_replacement = kwargs["path_replacement"]
    except KeyError:
        path_replacement = None       

    try:
        replace_ext = kwargs["replace_ext"]
    except KeyError:
        replace_ext = True        

    try:
        extra_tag = kwargs["extra_tag"]
    except KeyError:
        extra_tag = None

    try:
        arguments = kwargs["arguments"]
    except KeyError:
        arguments = None

    getsom_kwargs = kwargs.get("getsom_kwargs", {})

    if replace_path:
        if path_replacement is None:
            path_replacement = os.getcwd()
            
        fixed_filename = os.path.join(path_replacement,
                                      os.path.basename(filename))
    else:
        fixed_filename = filename

    if replace_ext:
        fixed_filename = hlr_utils.ext_replace(fixed_filename, data_ext,
                                               output_ext)
    else:
        pass

    if extra_tag is not None:
        fixed_filename = hlr_utils.add_tag(fixed_filename, extra_tag)

    # Handle difference between NeXus and other files
    if dst_type != "application/x-RedNxs":
        resource = open(fixed_filename, "w")
    else:
        resource = fixed_filename
        
    output_dst = DST.getInstance(dst_type, resource, arguments, **kwargs)
    if verbose:
        print "Writing %s" % message

    output_dst.writeSOM(data, **getsom_kwargs)
    output_dst.release_resource()
Example #11
0
import DST
import sys

if __name__=="__main__":
    data_filename = None
    geom_filename = None
    
    try:
        data_filename = sys.argv[1]
    except IndexError:
        pass # use the default name

    try:
        geom_filename = sys.argv[2]
    except IndexError:
        pass # use the default name

    id_tag = -1
    
    x_axis = "time_of_flight"
    dst = DST.getInstance("application/x-NeXus", data_filename)
    print "**********",data_filename
    som_list = dst.get_SOM_ids()
    print "SOM ID:",som_list[id_tag]

    som = dst.getSOM(som_list[id_tag], x_axis, end_id=(0,20))

    geom = DST.getInstance("application/x-NxsGeom", geom_filename)
    geom.setGeometry(som_list[id_tag], som)
Example #12
0
def run(config):
    """
    This method is where the data reduction process gets done. The data is
    read from the given NeXus file and, depending on which binning axes are
    present in the configuration, reduced to d-spacing and/or scalar Q
    spectra that are written to output files.

    @param config: Object containing the data reduction configuration
                   information.
    @type config: L{hlr_utils.Configure}
    """
    import sys

    import DST

    if config.data is None:
        raise RuntimeError("Need to pass a data filename to the driver "\
                           +"script.")

    try:
        data_dst = DST.getInstance("application/x-NeXus", config.data)
    except SystemError:
        print "ERROR: Failed to data read file %s" % config.data
        sys.exit(-1)

    so_axis = "time_of_flight"

    # The run metadata (rmd) file must be written exactly once. When the
    # Q branch will run, it writes the metadata itself, so the d-spacing
    # branch skips it; otherwise the d-spacing branch writes it.
    if config.Q_bins is not None:
        rmd_written = False
    else:
        rmd_written = True

    if config.verbose:
        print "Reading data file"

    d_som1 = data_dst.getSOM(config.data_paths, so_axis)

    # Override the pixel geometry with a detector geometry file if provided
    if config.det_geom is not None:
        if config.verbose:
            print "Reading in detector geometry file"

        det_geom_dst = DST.getInstance("application/x-NxsGeom",
                                       config.det_geom)
        det_geom_dst.setGeometry(config.data_paths, d_som1)
        det_geom_dst.release_resource()

    if config.d_bins is not None:

        if config.verbose:
            print "Converting TOF to d-spacing"

        d_som2 = convert_data_to_d_spacing(d_som1)

        if config.dump_pxl:
            hlr_utils.write_file(config.data,
                                 "text/Spec",
                                 d_som2,
                                 output_ext="dsp",
                                 verbose=config.verbose,
                                 message="pixel d-spacing information")

        d_som3 = common_lib.rebin_axis_1D(d_som2, config.d_bins)

        d_som4 = dr_lib.sum_all_spectra(d_som3)

        d_som4[0].id = ("bank3", (0, 0))

        hlr_utils.write_file(config.output_ds,
                             "text/Spec",
                             d_som4,
                             replace_ext=False,
                             verbose=config.verbose,
                             message="combined d-spacing information")

        if config.verbose:
            print "Converting d-spacing to TOF focused detector"

        d_som5 = convert_data_to_tof_focused_det(config, d_som2)

        if config.dump_pxl:
            hlr_utils.write_file(config.data,
                                 "text/Spec",
                                 d_som5,
                                 output_ext="tfp",
                                 verbose=config.verbose,
                                 message="pixel TOF focused information")

        d_som6 = common_lib.rebin_axis_1D(d_som5, config.tof_bins)

        d_som7 = dr_lib.sum_all_spectra(d_som6)

        d_som7[0].id = config.pixel_id

        hlr_utils.write_file(config.output_tof,
                             "text/GSAS",
                             d_som7,
                             replace_ext=False,
                             verbose=config.verbose,
                             message="combined TOF focused information")

        hlr_utils.write_file(config.data,
                             "text/Spec",
                             d_som7,
                             output_ext="toff",
                             verbose=config.verbose,
                             message="combined TOF focused information")

        d_som7.attr_list["config"] = config

        if rmd_written:
            hlr_utils.write_file(config.data,
                                 "text/rmd",
                                 d_som7,
                                 output_ext="rmd",
                                 verbose=config.verbose,
                                 message="metadata")

    if config.Q_bins is not None:
        if config.verbose:
            print "Converting TOF to Q"

        d_som2 = convert_data_to_scalar_Q(config, d_som1)

        if config.dump_pxl:
            hlr_utils.write_file(config.data,
                                 "text/Spec",
                                 d_som2,
                                 output_ext="qtp",
                                 verbose=config.verbose,
                                 message="pixel Q information")

        d_som3 = common_lib.rebin_axis_1D(d_som2, config.Q_bins)

        d_som4 = dr_lib.sum_all_spectra(d_som3)

        d_som4[0].id = ("bank3", (0, 0))

        hlr_utils.write_file(config.output_qt,
                             "text/Spec",
                             d_som4,
                             replace_ext=False,
                             verbose=config.verbose,
                             message="combined Q information")

        # BUGFIX: the original passed d_som7 to the metadata writer here,
        # but d_som7 is only defined when the d-spacing branch above has
        # run; with only Q_bins configured this raised a NameError. Use
        # the Q-space result instead and attach the configuration to it
        # so the metadata writer can record it.
        d_som4.attr_list["config"] = config

        hlr_utils.write_file(config.data,
                             "text/rmd",
                             d_som4,
                             output_ext="rmd",
                             verbose=config.verbose,
                             message="metadata")
Example #13
0
def run(config, tim=None):
    """
    This method is where the data reduction process gets done.

    @param config: Object containing the data reduction configuration
                   information.
    @type config: L{hlr_utils.Configure}

    @param tim: (OPTIONAL) Object that will allow the method to perform
                           timing evaluations.
    @type tim: C{sns_time.DiffTime}
    """
    import common_lib
    import dr_lib
    import DST

    # Capture the starting timestamp so the total running time can be
    # reported at the end of the reduction.
    if tim is not None:
        tim.getTime(False)
        old_time = tim.getOldTime()

    if config.data is None:
        raise RuntimeError("Need to pass a data filename to the driver " + "script.")

    # Read in geometry if one is provided
    if config.inst_geom is not None:
        if config.verbose:
            print "Reading in instrument geometry file"

        inst_geom_dst = DST.getInstance("application/x-NxsGeom", config.inst_geom)
    else:
        inst_geom_dst = None

    # Add so_axis to Configure object
    config.so_axis = "time_of_flight"

    dataset_type = "background"

    # Step 0: Open appropriate data files

    # Data
    if config.verbose:
        print "Reading %s file" % dataset_type

    # The [0] is to get the data SOM and ignore the None background SOM
    dp_som = dr_lib.add_files(
        config.data,
        Data_Paths=config.data_paths.toPath(),
        SO_Axis=config.so_axis,
        Signal_ROI=config.roi_file,
        dataset_type=dataset_type,
        Verbose=config.verbose,
        Timer=tim,
    )

    if tim is not None:
        tim.getTime(msg="After reading %s " % dataset_type)

    # Clean up the raw bin contents before further processing.
    dp_som0 = dr_lib.fix_bin_contents(dp_som)

    # Release the raw spectra as soon as the cleaned copy exists.
    del dp_som

    # Apply the replacement instrument geometry to the detector pixels.
    if inst_geom_dst is not None:
        inst_geom_dst.setGeometry(config.data_paths.toPath(), dp_som0)

    # Note: time_zero_offset_det MUST be a tuple
    if config.time_zero_offset_det is not None:
        dp_som0.attr_list["Time_zero_offset_det"] = config.time_zero_offset_det.toValErrTuple()

    # Step 2: Convert TOF to wavelength for data
    if config.verbose:
        print "Converting TOF to wavelength"

    if tim is not None:
        tim.getTime(False)

    # Convert detector pixels
    # NOTE(review): config.time_zero_offset_det is dereferenced here
    # unconditionally, although the attribute assignment above guards
    # against None -- presumably the driver always supplies the offset;
    # confirm against the caller.
    dp_som1 = common_lib.tof_to_wavelength_lin_time_zero(
        dp_som0, units="microsecond", time_zero_offset=config.time_zero_offset_det.toValErrTuple(), inst_param="total"
    )

    if tim is not None:
        tim.getTime(msg="After converting TOF to wavelength ")

    del dp_som0

    # Trim the wavelength spectra to the configured low/high cut limits.
    if config.verbose:
        print "Cutting spectra"

    if tim is not None:
        tim.getTime(False)

    dp_som2 = dr_lib.cut_spectra(dp_som1, config.lambda_low_cut, config.lambda_high_cut)

    if tim is not None:
        tim.getTime(msg="After cutting spectra ")

    del dp_som1

    rebin_axis = config.lambda_bins.toNessiList()

    # Put the data on the same axis
    if config.verbose:
        print "Rebinning data onto specified wavelength axis"

    if tim is not None:
        tim.getTime(False)

    dp_som3 = dr_lib.sum_by_rebin_frac(dp_som2, rebin_axis)

    if tim is not None:
        tim.getTime(msg="After rebinning data onto specified wavelength axis ")

    del dp_som2

    data_run_time = dp_som3.attr_list["background-duration"]

    # Calculate the accelerator on time
    if config.verbose:
        print "Calculating accelerator on time"

    # Accelerator on time = run duration minus the configured downtime.
    acc_on_time = hlr_utils.DrParameter(data_run_time.getValue() - config.acc_down_time.getValue(), 0.0, "seconds")

    # Get the number of data bins
    num_wave_bins = len(rebin_axis) - 1

    # Calculate the scaled accelerator uptime
    if config.verbose:
        print "Calculating the scaled accelerator uptime"

    if tim is not None:
        tim.getTime(False)

    # Uptime distributed evenly over the wavelength bins.
    final_scale = acc_on_time.toValErrTuple()[0] / num_wave_bins

    if tim is not None:
        tim.getTime(msg="After calculating the scaled accelerator uptime ")

    # Create the final background spectrum
    if config.verbose:
        print "Creating the background spectrum"

    if tim is not None:
        tim.getTime(False)

    # Normalize by the scaled uptime and record the scale factor used.
    dp_som4 = common_lib.div_ncerr(dp_som3, (final_scale, 0))
    dp_som4.attr_list["%s-Scaling" % dataset_type] = final_scale

    if tim is not None:
        tim.getTime(msg="After creating background spectrum ")

    del dp_som3

    # Write out the background spectrum
    hlr_utils.write_file(
        config.output,
        "text/Spec",
        dp_som4,
        verbose=config.verbose,
        output_ext="bkg",
        data_ext=config.ext_replacement,
        replace_path=False,
        replace_ext=True,
        message="background spectrum",
    )

    # Attach the configuration so the metadata writer can record it.
    dp_som4.attr_list["config"] = config

    hlr_utils.write_file(
        config.output,
        "text/rmd",
        dp_som4,
        output_ext="rmd",
        data_ext=config.ext_replacement,
        path_replacement=config.path_replacement,
        verbose=config.verbose,
        message="metadata",
    )

    # Report the total running time measured from the start of run().
    if tim is not None:
        tim.setOldTime(old_time)
        tim.getTime(msg="Total Running Time")
Example #14
0
def run(config, tim):
    """
    This method is where the data reduction process gets done.

    Reflectometry reduction: sample (and optional normalization) data are
    processed, normalized, converted to scalar Q (instrument dependent),
    rebinned and summed into a final reflectivity spectrum that is written
    to file. Behavior branches on config.inst being "REF_L" or "REF_M".

    @param config: Object containing the data reduction configuration
                   information.
    @type config: L{hlr_utils.Configure}

    @param tim: Object that will allow the method to perform timing
                evaluations.
    @type tim: C{sns_time.DiffTime}
    """
    import DST
    import math
    # REF_M needs direct axis manipulation and bin-center utilities.
    if config.inst == "REF_M":
        import axis_manip
        import utils

    # Capture the starting timestamp so total runtime can be reported.
    if tim is not None:
        tim.getTime(False)
        old_time = tim.getOldTime()

    if config.data is None:
        raise RuntimeError("Need to pass a data filename to the driver "\
                           +"script.")

    # Read in sample data geometry if one is provided
    if config.data_inst_geom is not None:
        if config.verbose:
            print "Reading in sample data instrument geometry file"

        data_inst_geom_dst = DST.getInstance("application/x-NxsGeom",
                                             config.data_inst_geom)
    else:
        data_inst_geom_dst = None

    # Read in normalization data geometry if one is provided
    if config.norm_inst_geom is not None:
        if config.verbose:
            print "Reading in normalization instrument geometry file"

        norm_inst_geom_dst = DST.getInstance("application/x-NxsGeom",
                                        config.norm_inst_geom)
    else:
        norm_inst_geom_dst = None

    # Perform Steps 1-6 on sample data
    d_som1 = dr_lib.process_ref_data(config.data, config,
                                     config.data_roi_file,
                                     config.dbkg_roi_file,
                                     config.no_bkg,
                                     tof_cuts=config.tof_cuts,
                                     inst_geom_dst=data_inst_geom_dst,
                                     timer=tim)

    # Perform Steps 1-6 on normalization data
    if config.norm is not None:
        n_som1 = dr_lib.process_ref_data(config.norm, config,
                                         config.norm_roi_file,
                                         config.nbkg_roi_file,
                                         config.no_norm_bkg,
                                         dataset_type="norm",
                                         tof_cuts=config.tof_cuts,
                                         inst_geom_dst=norm_inst_geom_dst,
                                         timer=tim)
    else:
        n_som1 = None

    # Keep a copy of the TOF axis: it is needed later to derive the Q
    # rebinning axis when no Q binning was given but a scattering angle was.
    if config.Q_bins is None and config.scatt_angle is not None:
        import copy
        tof_axis = copy.deepcopy(d_som1[0].axis[0].val)

    # Closing sample data instrument geometry file
    if data_inst_geom_dst is not None:
        data_inst_geom_dst.release_resource()

    # Closing normalization data instrument geometry file
    if norm_inst_geom_dst is not None:
        norm_inst_geom_dst.release_resource()

    # Step 7: Sum all normalization spectra together
    if config.norm is not None:
        n_som2 = dr_lib.sum_all_spectra(n_som1)
    else:
        n_som2 = None

    del n_som1

    # Step 8: Divide data by normalization
    if config.verbose and config.norm is not None:
        print "Scale data by normalization"

    if config.norm is not None:
        d_som2 = common_lib.div_ncerr(d_som1, n_som2, length_one_som=True)
    else:
        d_som2 = d_som1

    if tim is not None and config.norm is not None:
        tim.getTime(msg="After normalizing signal spectra")

    del d_som1, n_som2

    # Optionally dump the combined R(TOF) spectrum.
    if config.dump_rtof_comb:
        d_som2_1 = dr_lib.sum_all_spectra(d_som2)
        d_som2_2 = dr_lib.data_filter(d_som2_1)
        del d_som2_1

        # REF_M output is written as a density on bin centers.
        if config.inst == "REF_M":
            tof_bc = utils.calc_bin_centers(d_som2_2[0].axis[0].val)
            d_som2_2[0].axis[0].val = tof_bc[0]
            d_som2_2.setDataSetType("density")

        hlr_utils.write_file(config.output, "text/Spec", d_som2_2,
                             output_ext="crtof",
                             verbose=config.verbose,
                             data_ext=config.ext_replacement,
                             path_replacement=config.path_replacement,
                             message="combined R(TOF) information")

        del d_som2_2

    # Optionally dump the per-pixel R(TOF) spectra.
    if config.dump_rtof:
        if config.inst == "REF_M":
            d_som2_1 = d_som2
        else:
            d_som2_1 = dr_lib.filter_ref_data(d_som2)

        hlr_utils.write_file(config.output, "text/Spec", d_som2_1,
                             output_ext="rtof",
                             verbose=config.verbose,
                             data_ext=config.ext_replacement,
                             path_replacement=config.path_replacement,
                             message="R(TOF) information")
        del d_som2_1

    if config.inst == "REF_L":
        # Step 9: Convert TOF to scalar Q
        if config.verbose:
            print "Converting TOF to scalar Q"

        # Check to see if polar angle offset is necessary
        if config.angle_offset is not None:
            # Check on units, offset must be in radians
            p_temp = config.angle_offset.toFullTuple(True)
            if p_temp[2] == "degrees" or p_temp[2] == "degree":
                deg_to_rad =  (math.pi / 180.0)
                p_off_rads = p_temp[0] * deg_to_rad
                # Variance scales with the square of the conversion factor.
                p_off_err2_rads = p_temp[1] * deg_to_rad * deg_to_rad
            else:
                p_off_rads = p_temp[0]
                p_off_err2_rads = p_temp[1]

            p_offset = (p_off_rads, p_off_err2_rads)

            d_som2.attr_list["angle_offset"] = config.angle_offset
        else:
            p_offset = None

        if tim is not None:
            tim.getTime(False)

        d_som3 = common_lib.tof_to_scalar_Q(d_som2, units="microsecond",
                                            angle_offset=p_offset,
                                            lojac=False)

        del d_som2

        if tim is not None:
            tim.getTime(msg="After converting wavelength to scalar Q ")

        # Optionally dump the per-pixel R(Q) spectra.
        if config.dump_rq:
            d_som3_1 = dr_lib.data_filter(d_som3, clean_axis=True)
            hlr_utils.write_file(config.output, "text/Spec", d_som3_1,
                                 output_ext="rq",
                                 verbose=config.verbose,
                                 data_ext=config.ext_replacement,
                                 path_replacement=config.path_replacement,
                                 message="pixel R(Q) information")
            del d_som3_1

        if not config.no_filter:
            if config.verbose:
                print "Filtering final data"

            if tim is not None:
                tim.getTime(False)

            d_som4 = dr_lib.data_filter(d_som3)

            if tim is not None:
                tim.getTime(msg="After filtering data")
        else:
            d_som4 = d_som3

        del d_som3
    else:
        # REF_M: no TOF->Q conversion at this point.
        d_som4 = d_som2

    # Step 10: Rebin all spectra to final Q axis
    if config.Q_bins is None:
        if config.scatt_angle is None:
            # Derive the Q axis directly from the data.
            config.Q_bins = dr_lib.create_axis_from_data(d_som4)
            rebin_axis = config.Q_bins.toNessiList()
        else:
            # Get scattering angle and make Q conversion from TOF axis
            # Check on units, scattering angle must be in radians
            sa_temp = config.scatt_angle.toFullTuple(True)
            if sa_temp[2] == "degrees" or sa_temp[2] == "degree":
                deg_to_rad =  (math.pi / 180.0)
                sa_rads = sa_temp[0] * deg_to_rad
                sa_err2_rads = sa_temp[1] * deg_to_rad * deg_to_rad
            else:
                sa_rads = sa_temp[0]
                sa_err2_rads = sa_temp[1]

            sa = (sa_rads, sa_err2_rads)

            pl = d_som4.attr_list.instrument.get_total_path(d_som4[0].id,
                                                            det_secondary=True)

            import nessi_list
            # Zero-error TOF axis for the conversion.
            tof_axis_err2 = nessi_list.NessiList(len(tof_axis))

            rebin_axis = axis_manip.tof_to_scalar_Q(tof_axis,
                                                    tof_axis_err2,
                                                    pl[0], pl[1],
                                                    sa[0], sa[1])[0]

            # TOF and Q run in opposite directions; make the axis ascending.
            axis_manip.reverse_array_nc(rebin_axis)
    else:
        rebin_axis = config.Q_bins.toNessiList()

    if config.inst == "REF_L":
        if config.verbose:
            print "Rebinning spectra"

        if tim is not None:
            tim.getTime(False)

        d_som5 = common_lib.rebin_axis_1D_linint(d_som4, rebin_axis)

        if tim is not None:
            tim.getTime(msg="After rebinning spectra")

        del d_som4

        if config.dump_rqr:
            hlr_utils.write_file(config.output, "text/Spec", d_som5,
                                 output_ext="rqr",
                                 verbose=config.verbose,
                                 data_ext=config.ext_replacement,
                                 path_replacement=config.path_replacement,
                                 message="pixel R(Q) (after rebinning) "\
                                 +"information")

        # Step 11: Sum all rebinned spectra
        if config.verbose:
            print "Summing spectra"

        if tim is not None:
            tim.getTime(False)

        d_som6 = dr_lib.sum_all_spectra(d_som5)

        if tim is not None:
            tim.getTime(msg="After summing spectra")

        del d_som5
    else:
        d_som5 = d_som4

    if config.inst == "REF_M":
        # Sum, filter and reverse the spectra (Q was derived from a
        # descending TOF axis), then present the result as a density on
        # Q bin centers.
        d_som5A = dr_lib.sum_all_spectra(d_som5)
        del d_som5
        d_som6 = dr_lib.data_filter(d_som5A)
        del d_som5A
        axis_manip.reverse_array_nc(d_som6[0].y)
        axis_manip.reverse_array_nc(d_som6[0].var_y)

        d_som6.setYLabel("Intensity")
        d_som6.setYUnits("Counts/A-1")
        d_som6.setAllAxisLabels(["scalar wavevector transfer"])
        d_som6.setAllAxisUnits(["1/Angstroms"])

        Q_bc = utils.calc_bin_centers(rebin_axis)
        d_som6[0].axis[0].val = Q_bc[0]
        d_som6.setDataSetType("density")

    # Write out the final combined reflectivity spectrum.
    hlr_utils.write_file(config.output, "text/Spec", d_som6,
                         replace_ext=False,
                         replace_path=False,
                         verbose=config.verbose,
                         message="combined Reflectivity information")

    # Attach the configuration so the metadata writer can record it.
    d_som6.attr_list["config"] = config

    hlr_utils.write_file(config.output, "text/rmd", d_som6,
                         output_ext="rmd", verbose=config.verbose,
                         data_ext=config.ext_replacement,
                         path_replacement=config.path_replacement,
                         message="metadata")

    # Report the total running time measured from the start of run().
    if tim is not None:
        tim.setOldTime(old_time)
        tim.getTime(msg="Total Running Time")
Example #15
0
# responsibility for the accuracy, completeness, or usefulness of any
# information, apparatus, product, or process disclosed, or represents that
# its use would not infringe privately owned rights.
#

# $Id$

import DST
import SOM
import hlr_utils

# Output file for the numeric-information writer.
filename = "numinfo.dat"

# Assemble a SOM holding one summary value (with variance) per bank.
som = SOM.SOM()
for bank_name, value, variance in [("bank1", 111.0, 23.0),
                                   ("bank2", 143.0, 27.0)]:
    spectrum = SOM.SO()
    spectrum.id = (bank_name, (0, 0))
    spectrum.y = value
    spectrum.var_y = variance
    som.append(spectrum)

# Write the SOM through the NumInfo DST; release_resource() takes care
# of the open file handle.
handle = open(filename, "w")
writer = DST.NumInfoDST(handle)
writer.writeSOM(som)
writer.release_resource()
Example #16
0
def add_files_bg(filelist, **kwargs):
    """
    This function takes a list of U{NeXus<www.nexusformat.org>} files and
    various keyword arguments and returns a data C{SOM} and a background C{SOM}
    (if requested) that is the sum of all the data from the specified files.
    B{It is assumed that the files contain similar data as only crude
    cross-checks will be made. You have been warned.}

    @param filelist: A list containing the names of the files to sum
    @type filelist: C{list}
    
    @param kwargs: A list of keyword arguments that the function accepts:
    
    @keyword SO_Axis: This is the name of the main axis to read from the NeXus
                      file
    @type SO_Axis: C{string}
    
    @keyword Data_Paths: This contains the data paths and signals for the
                         requested detector banks
    @type Data_Paths: C{tuple} of C{tuple}s
    
    @keyword Signal_ROI: This is the name of a file that contains a list of
                         pixel IDs that will be read from the data file and
                         stored as a signal C{SOM}
    @type Signal_ROI: C{string}
    
    @keyword Bkg_ROI: This is the name of a file that contains a list of pixel
                      IDs that will be read from the data file and stored as a
                      background C{SOM}
    @type Bkg_ROI: C{string}
    
    @keyword dataset_type: The practical name of the dataset being processed.
                           The default value is I{data}.
    @type dataset_type: C{string}

    @keyword dst_type: The type of C{DST} to be created during file read-in.
                       The default value is I{application/x-NeXus}.
    @type dst_type: C{string}
    
    @keyword Verbose: This is a flag to turn on print statments. The default is
                      I{False}.
    @type Verbose: C{boolean}
    
    @keyword Timer: This is an SNS Timer object used for showing the
                    performance timing in the function.
    @type Timer: C{sns_timing.Timer}


    @return: Signal C{SOM.SOM} and background C{SOM.SOM}
    @rtype: C{tuple}

    
    @raise SystemExit: If any file cannot be read
    """
    import sys

    import common_lib
    import DST
    import hlr_utils

    # Parse keywords
    try:
        so_axis = kwargs["SO_Axis"]
    except KeyError:
        so_axis = "time_of_flight"

    try:
        data_paths = kwargs["Data_Paths"]
    except KeyError:
        data_paths = None

    try:
        signal_roi = kwargs["Signal_ROI"]
    except KeyError:
        signal_roi = None
    try:
        bkg_roi = kwargs["Bkg_ROI"]
    except KeyError:
        bkg_roi = None

    try:
        dataset_type = kwargs["dataset_type"]
    except KeyError:
        dataset_type = "data"

    try:
        dst_type = kwargs["dst_type"]
    except KeyError:
        try:
            dst_type = hlr_utils.file_peeker(filelist[0])
        except RuntimeError:
            # Assume it is a NeXus file, since it is not a DR produced file
            dst_type = "application/x-NeXus"

    try:
        verbose = kwargs["Verbose"]
    except KeyError:
        verbose = False

    try:
        timer = kwargs["Timer"]
    except KeyError:
        timer = None

    counter = 0

    for filename in filelist:
        if verbose:
            print "File:", filename

        try:
            if dst_type == "application/x-NeXus":
                data_dst = DST.getInstance(dst_type, filename)
            else:
                resource = open(filename, "r")
                data_dst = DST.getInstance(dst_type, resource)
        except SystemError:
            print "ERROR: Failed to data read file %s" % filename
            sys.exit(-1)

        if verbose:
            print "Reading data file %d" % counter

        if counter == 0:
            if dst_type == "application/x-NeXus":
                d_som1 = data_dst.getSOM(data_paths,
                                         so_axis,
                                         roi_file=signal_roi)
                d_som1.rekeyNxPars(dataset_type)
            else:
                if dst_type != "text/Dave2d":
                    d_som1 = data_dst.getSOM(data_paths, roi_file=signal_roi)
                else:
                    d_som1 = data_dst.getSOM(data_paths)

            if verbose:
                print "# Signal SO:", len(d_som1)
                if dst_type == "application/x-NeXus":
                    print "# TOF:", len(d_som1[0])
                    print "# TOF Axis:", len(d_som1[0].axis[0].val)
                elif dst_type != "text/num-info":
                    print "# Data Size:", len(d_som1[0])
                    print "# X-Axis:", len(d_som1[0].axis[0].val)
                    try:
                        axis_len = len(d_som1[0].axis[1].val)
                        print "# Y-Axis:", axis_len
                    except IndexError:
                        pass

            if bkg_roi is not None:
                if dst_type == "application/x-NeXus":
                    b_som1 = data_dst.getSOM(data_paths,
                                             so_axis,
                                             roi_file=bkg_roi)
                    b_som1.rekeyNxPars(dataset_type)
                else:
                    if dst_type != "text/Dave2d":
                        b_som1 = data_dst.getSOM(data_paths, roi_file=bkg_roi)
                    else:
                        b_som1 = data_dst.getSOM(data_paths)
                if verbose:
                    print "# Background SO:", len(b_som1)

            else:
                b_som1 = None

            if timer is not None:
                timer.getTime(msg="After reading data")

        else:
            if dst_type == "application/x-NeXus":
                d_som_t = data_dst.getSOM(data_paths,
                                          so_axis,
                                          roi_file=signal_roi)
                d_som_t.rekeyNxPars(dataset_type)
                add_nxpars_sig = True
            else:
                if dst_type != "text/Dave2d":
                    d_som_t = data_dst.getSOM(data_paths, roi_file=signal_roi)
                else:
                    d_som_t = data_dst.getSOM(data_paths)
                add_nxpars_sig = False

            if bkg_roi is not None:
                if dst_type == "application/x-NeXus":
                    b_som_t = data_dst.getSOM(data_paths,
                                              so_axis,
                                              roi_file=bkg_roi)
                    b_som_t.rekeyNxPars(dataset_type)
                    add_nxpars_bkg = True
                else:
                    if dst_type != "text/Dave2d":
                        b_som_t = data_dst.getSOM(data_paths, roi_file=bkg_roi)
                    else:
                        b_som_t = data_dst.getSOM(data_paths)
                    add_nxpars_bkg = False
            else:
                b_som_t = None
            if timer is not None:
                timer.getTime(msg="After reading data")

            d_som1 = common_lib.add_ncerr(d_som_t,
                                          d_som1,
                                          add_nxpars=add_nxpars_sig)
            if bkg_roi is not None:
                b_som1 = common_lib.add_ncerr(b_som_t,
                                              b_som1,
                                              add_nxpars=add_nxpars_bkg)

            if timer is not None:
                timer.getTime(msg="After adding spectra")

            del d_som_t
            if bkg_roi is not None:
                del b_som_t

            if timer is not None:
                timer.getTime(msg="After SOM deletion")

        data_dst.release_resource()
        del data_dst
        counter += 1

        if timer is not None:
            timer.getTime(msg="After resource release and DST deletion")

        if dst_type == "application/x-NeXus":
            som_key_parts = [dataset_type, "filename"]
            som_key = "-".join(som_key_parts)

            d_som1.attr_list[som_key] = filelist
            if b_som1 is not None:
                b_som1.attr_list[som_key] = filelist
        else:
            # Previously written files already have this structure imposed
            pass

    return (d_som1, b_som1)
Example #17
0
def run(config, tim=None):
    """
    This method is where the data reduction process gets done.

    @param config: Object containing the data reduction configuration
                   information.
    @type config: L{hlr_utils.Configure}

    @param tim: (OPTIONAL) Object that will allow the method to perform
                           timing evaluations.
    @type tim: C{sns_time.DiffTime}
    """
    import common_lib
    import dr_lib
    import DST

    if tim is not None:
        tim.getTime(False)
        old_time = tim.getOldTime()

    if config.data is None:
        raise RuntimeError("Need to pass a data filename to the driver " + "script.")

    # Read in geometry if one is provided
    if config.inst_geom is not None:
        if config.verbose:
            print "Reading in instrument geometry file"

        inst_geom_dst = DST.getInstance("application/x-NxsGeom", config.inst_geom)
    else:
        inst_geom_dst = None

    config.so_axis = "time_of_flight"

    # Steps 1-3: Produce a scaled summed dark current dataset
    dc_som = dr_lib.scaled_summed_data(config.dkcur, config, dataset_type="dark_current", timer=tim)

    # Perform Steps 3-6 on black can data
    if config.bcan is not None:
        b_som1 = dr_lib.calibrate_dgs_data(
            config.bcan,
            config,
            dc_som,
            dataset_type="black_can",
            inst_geom_dst=inst_geom_dst,
            tib_const=config.tib_const,
            cwp=config.cwp_bcan,
            timer=tim,
        )
    else:
        b_som1 = None

    # Perform Steps 3-6 on empty can data
    if config.ecan is not None:
        e_som1 = dr_lib.calibrate_dgs_data(
            config.ecan,
            config,
            dc_som,
            dataset_type="empty_can",
            inst_geom_dst=inst_geom_dst,
            tib_const=config.tib_const,
            cwp=config.cwp_ecan,
            timer=tim,
        )
    else:
        e_som1 = None

    # Perform Steps 3-6 on normalization data
    n_som1 = dr_lib.calibrate_dgs_data(
        config.data,
        config,
        dc_som,
        dataset_type="normalization",
        inst_geom_dst=inst_geom_dst,
        tib_const=config.tib_const,
        cwp=config.cwp_data,
        timer=tim,
    )

    # Perform Steps 7-16 on normalization data
    if config.norm_trans_coeff is None:
        norm_trans_coeff = None
    else:
        norm_trans_coeff = config.norm_trans_coeff.toValErrTuple()

    # Determine if we need to rebin the empty or black can data
    if config.ecan is not None and e_som1 is not None:
        ecan_cwp = True
    else:
        ecan_cwp = False

    if config.bcan is not None and b_som1 is not None:
        bcan_cwp = True
    else:
        bcan_cwp = False

    cwp_used = ecan_cwp or bcan_cwp

    n_som2 = dr_lib.process_dgs_data(
        n_som1, config, b_som1, e_som1, norm_trans_coeff, dataset_type="normalization", cwp_used=cwp_used, timer=tim
    )

    del n_som1, b_som1, e_som1

    # Step 17: Integrate normalization spectra
    if config.verbose:
        print "Integrating normalization spectra"

    if tim is not None:
        tim.getTime(False)

    if config.norm_int_range is None:
        start_val = float("inf")
        end_val = float("inf")
    else:
        if not config.wb_norm:
            # Translate energy transfer to final energy
            ef_start = config.initial_energy.getValue() - config.norm_int_range[0]
            ef_end = config.initial_energy.getValue() - config.norm_int_range[1]
            # Convert final energy to final wavelength
            start_val = common_lib.energy_to_wavelength((ef_start, 0.0))[0]
            end_val = common_lib.energy_to_wavelength((ef_end, 0.0))[0]
        else:
            start_val = config.norm_int_range[0]
            end_val = config.norm_int_range[1]

    n_som3 = dr_lib.integrate_spectra(n_som2, start=start_val, end=end_val, width=True)

    del n_som2

    if tim is not None:
        tim.getTime(msg="After integrating normalization spectra ")

    file_comment = "Normalization Integration range: %0.3fA, %0.3fA" % (start_val, end_val)

    hlr_utils.write_file(
        config.output,
        "text/num-info",
        n_som3,
        output_ext="norm",
        data_ext=config.ext_replacement,
        path_replacement=config.path_replacement,
        verbose=config.verbose,
        message="normalization values",
        comments=[file_comment],
        tag="Integral",
        units="counts",
    )

    if tim is not None:
        tim.getTime(False)

    if config.verbose:
        print "Making mask file"

    # Make mask file from threshold
    dr_lib.filter_normalization(n_som3, config.lo_threshold, config.hi_threshold, config)

    if tim is not None:
        tim.getTime(msg="After making mask file ")

    # Write out RMD file
    n_som3.attr_list["config"] = config

    hlr_utils.write_file(
        config.output,
        "text/rmd",
        n_som3,
        output_ext="rmd",
        data_ext=config.ext_replacement,
        path_replacement=config.path_replacement,
        verbose=config.verbose,
        message="metadata",
    )

    if tim is not None:
        tim.setOldTime(old_time)
        tim.getTime(msg="Total Running Time")
Example #18
0
# its use would not infringe privately owned rights.
#

# $Id$

import DST
from SOM import SOM
from SOM import SO
from time import localtime, strftime, time

# Output data file for the test SOM
filename_SOM1 = "stuff1.dat"

# Build a SOM with metadata attributes and 2D Q/E axis labels
SOM1 = SOM()
SOM1.attr_list["filename"] = filename_SOM1
SOM1.attr_list["epoch"] = time()
SOM1.attr_list["timestamp"] = DST.make_ISO8601(SOM1.attr_list["epoch"])
SOM1.attr_list["username"] = "******"
SOM1.setAllAxisLabels(["Q", "E"])
SOM1.setAllAxisUnits(["A-1", "meV"])
SOM1.setYLabel("Intensity")
SOM1.setYUnits("Counts/(meV A-1))")

# Create a 2-axis SO with a 5-point Q axis and a 10-point E axis
SO1 = SO(2)
SO1.id = 0
SO1.axis[0].val.extend(range(5))
SO1.axis[1].val.extend(range(10))

# Histogram data: one y value per 2D bin, i.e. (len(axis)-1) per axis
y_len = (len(SO1.axis[0].val) - 1) * (len(SO1.axis[1].val) - 1)
y = range(y_len)
SO1.y.extend(y)
SO1.var_y.extend(y)
Example #19
0
# NOTE(review): SOM1, Sample, NessiList, SO and DST are defined/imported
# above this chunk -- verify against the full file.
SOM1.setAllAxisLabels(["momentum transfer", "energy transfer"])
SOM1.setAllAxisUnits(["1/A", "meV"])
SOM1.attr_list["data-title"] = "Test S(Q,E)"
SOM1.attr_list["data-run_number"] = "1344"

# Attach sample metadata to the SOM
DSample = Sample()
DSample.name = "Test Sample"
DSample.nature = "CoCo"
SOM1.attr_list.sample = DSample

# Axis values (x, y) and histogram contents (z) for a 3x2 bin grid
x = NessiList()
y = NessiList()
z = NessiList()

x.extend(0, 1, 2, 3)
y.extend(0, 1, 2)
z.extend(1, 2, 3, 4, 5, 6)

# 2-axis SO; var_y deliberately aliases the same list as y here
SO1 = SO(2)
SO1.id = ("bank1", (4, 32))
SO1.y = z
SO1.var_y = z
SO1.axis[0].val = x
SO1.axis[1].val = y

SOM1.append(SO1)

# Write the SOM out to a reduced NeXus file and close the resource
rednxs = DST.RedNxsDST("test_red.nxs")
rednxs.writeSOM(SOM1, entry_name="sqe")
rednxs.release_resource()
Example #20
0
def run(config):
    """
    This method is where the data reduction process gets done: it sums
    pixel groups (vertically or horizontally) for each detector bank,
    integrates each summed spectrum and writes one TOF intensity-sum file
    per group.

    @param config: Object containing the data reduction configuration
                   information.
    @type config: L{hlr_utils.Configure}
    """
    import sys

    import dr_lib
    import DST
    # FIX: hlr_utils is used for write_file below but was never imported
    import hlr_utils
    import SOM

    banks = [("/entry/bank1", 1), ("/entry/bank2", 1)]

    # Detector bank dimensions in pixels; assumes 64x64 banks -- TODO confirm
    max_ids = (64, 64)

    if config.vertical:
        tag = "v"
        size = max_ids[1]
        # NOTE: Python 2 integer division -- pixel_group should divide evenly
        reps = max_ids[0] / config.pixel_group
        label = "Integrated pixel"
    else:
        tag = "h"
        size = max_ids[1] / config.pixel_group
        reps = max_ids[0] / config.sum_tubes
        label = "Tube Number"

    try:
        data_dst = DST.getInstance("application/x-NeXus",
                                   config.data)
    except SystemError:
        print "ERROR: Failed to data read file %s" % config.data
        sys.exit(-1)

    so_axis = "time_of_flight"

    for path in banks:
        # Bank name is the last component of the NeXus path
        bank = path[0].split('/')[-1]

        for i in range(size):

            # One output SOM/SO per pixel row (or tube group)
            tSOM = SOM.SOM()
            tSO = SOM.SO(construct=True)

            counter = 1
            for j in range(reps):

                # Compute the (start, end) pixel ID rectangle for this group
                if config.vertical:
                    starting_id = (i, config.pixel_group * j)
                    ending_id = (i + 1, config.pixel_group * (j + 1))
                else:
                    if config.sum_tubes == 1:
                        x1 = j
                        x2 = j + 1
                    else:
                        x1 = j * config.sum_tubes
                        x2 = (j + 1) * config.sum_tubes

                    starting_id = (x1, config.pixel_group * i)
                    ending_id = (x2, config.pixel_group * (i + 1))

                d_som1 = data_dst.getSOM(path, so_axis,
                                         start_id=starting_id,
                                         end_id=ending_id)

                # Collapse the pixel group to a single spectrum, keeping the
                # first pixel's ID as the group label
                d_som2 = dr_lib.sum_all_spectra(d_som1)
                d_som2[0].id = d_som1[0].id

                d_som1 = None
                del d_som1

                value = dr_lib.integrate_axis(d_som2)

                if config.verbose:
                    print "Sum", d_som2[0].id, ":", value[0], value[1]

                tSO.axis[0].val.append(counter)
                tSO.y.append(value[0])
                tSO.var_y.append(value[1])
                if counter == 1:
                    tSO.id = d_som2[0].id

                counter += 1

            tSOM.attr_list["filename"] = config.data
            tSOM.setTitle("TOF Pixel Summation")
            tSOM.setDataSetType("density")
            tSOM.setYLabel("Intensity Sum")
            tSOM.setYUnits("counts")
            tSOM.setAxisLabel(0, label)
            tSOM.setAxisUnits(0, "")
            tSOM.append(tSO)

            tag1 = str(i + 1)

            outfile = bank + "_" + tag + "_" + tag1 + ".tof"

            hlr_utils.write_file(outfile, "text/Spec", tSOM,
                                 verbose=config.verbose,
                                 message="intensity sum file",
                                 replace_ext=False)

    data_dst.release_resource()
Example #21
0
# NOTE(review): sys, sns_timing and DST are imported above this chunk --
# verify against the full file.
filename = sys.argv[1]
# Any second command-line argument (value unused) turns on debug mode
try:
    temp = sys.argv[2]
    debug = True
except IndexError:
    debug = False

# Setup output file: "<runtag>_geom.txt" derived from the input filename
outtag = filename.split('/')[-1].split('_')[0]
outfilename = outtag + "_geom.txt"
outfile = open(outfilename, "w")

timer = sns_timing.DiffTime()

data_dst = DST.getInstance("application/x-NeXus", filename)
timer.getTime(msg="After reading data ")

SOM_ids = data_dst.get_SOM_ids()

# Get the bank numbers sorted in proper order
# (SOM_id[1] == 1 selects the detector-data entries; "id" shadows the
# builtin here -- left as-is)
bank_list = [SOM_id[0].split('/')[-1] for SOM_id in SOM_ids if SOM_id[1] == 1]
bank_nums = [
    int(id.replace('bank', '')) for id in bank_list
    if not id.startswith("monitor")
]
bank_nums.sort()

# Sign conventions for the geometry calculation -- presumably detector
# quadrant signs; confirm against downstream use
signsx = [-1.0, 1.0]
signsy1 = signsx
signsy2 = [1.0, -1.0]
Example #22
0
def run(config):
    """
    This method is where the data reduction process gets done: it converts
    TOF data to d-spacing and/or scalar Q, rebins and sums the spectra, and
    writes the combined output and metadata files.

    @param config: Object containing the data reduction configuration
                   information.
    @type config: L{hlr_utils.Configure}

    @raise RuntimeError: If no data filename is provided in the
                         configuration.
    """
    import sys

    # FIX: common_lib, dr_lib and hlr_utils are used below but were never
    # imported locally (sibling drivers import all used modules locally)
    import common_lib
    import dr_lib
    import DST
    import hlr_utils

    if config.data is None:
        raise RuntimeError("Need to pass a data filename to the driver "\
                           +"script.")

    try:
        data_dst = DST.getInstance("application/x-NeXus", config.data)
    except SystemError:
        print "ERROR: Failed to data read file %s" % config.data
        sys.exit(-1)

    so_axis = "time_of_flight"

    # If Q rebinning will run, defer writing the RMD metadata file to the
    # end of the Q branch; otherwise the d-spacing branch writes it.
    if config.Q_bins is not None:
        rmd_written = False
    else:
        rmd_written = True

    if config.verbose:
        print "Reading data file"

    d_som1 = data_dst.getSOM(config.data_paths, so_axis)

    # Apply the detector geometry override if one was provided
    if config.det_geom is not None:
        if config.verbose:
            print "Reading in detector geometry file"

        det_geom_dst = DST.getInstance("application/x-NxsGeom",
                                       config.det_geom)
        det_geom_dst.setGeometry(config.data_paths, d_som1)
        det_geom_dst.release_resource()

    if config.d_bins is not None:

        if config.verbose:
            print "Converting TOF to d-spacing"

        # NOTE(review): convert_data_to_d_spacing and
        # convert_data_to_tof_focused_det are presumably module-level
        # helpers in this file -- confirm.
        d_som2 = convert_data_to_d_spacing(d_som1)

        if config.dump_pxl:
            hlr_utils.write_file(config.data, "text/Spec", d_som2,
                                 output_ext="dsp", verbose=config.verbose,
                                 message="pixel d-spacing information")

        d_som3 = common_lib.rebin_axis_1D(d_som2, config.d_bins)

        d_som4 = dr_lib.sum_all_spectra(d_som3)

        d_som4[0].id = ("bank3", (0, 0))

        hlr_utils.write_file(config.output_ds, "text/Spec", d_som4,
                             replace_ext=False, verbose=config.verbose,
                             message="combined d-spacing information")

        if config.verbose:
            print "Converting d-spacing to TOF focused detector"

        d_som5 = convert_data_to_tof_focused_det(config, d_som2)

        if config.dump_pxl:
            hlr_utils.write_file(config.data, "text/Spec", d_som5,
                                 output_ext="tfp", verbose=config.verbose,
                                 message="pixel TOF focused information")

        d_som6 = common_lib.rebin_axis_1D(d_som5, config.tof_bins)

        d_som7 = dr_lib.sum_all_spectra(d_som6)

        d_som7[0].id = config.pixel_id

        hlr_utils.write_file(config.output_tof, "text/GSAS", d_som7,
                             replace_ext=False, verbose=config.verbose,
                             message="combined TOF focused information")

        hlr_utils.write_file(config.data, "text/Spec", d_som7,
                             output_ext="toff",
                             verbose=config.verbose,
                             message="combined TOF focused information")

        d_som7.attr_list["config"] = config

        if rmd_written:
            hlr_utils.write_file(config.data, "text/rmd", d_som7,
                                 output_ext="rmd", verbose=config.verbose,
                                 message="metadata")

    else:
        pass

    if config.Q_bins is not None:
        if config.verbose:
            print "Converting TOF to Q"

        d_som2 = convert_data_to_scalar_Q(config, d_som1)

        if config.dump_pxl:
            hlr_utils.write_file(config.data, "text/Spec", d_som2,
                                 output_ext="qtp", verbose=config.verbose,
                                 message="pixel Q information")

        d_som3 = common_lib.rebin_axis_1D(d_som2, config.Q_bins)

        d_som4 = dr_lib.sum_all_spectra(d_som3)

        d_som4[0].id = ("bank3", (0, 0))

        hlr_utils.write_file(config.output_qt, "text/Spec", d_som4,
                             replace_ext=False, verbose=config.verbose,
                             message="combined Q information")

        # FIX: the original wrote d_som7 here, which is only defined in the
        # d_bins branch and raised NameError when only Q_bins was given;
        # the Q-branch SOM (d_som4) is the intended payload.
        d_som4.attr_list["config"] = config

        hlr_utils.write_file(config.data, "text/rmd", d_som4,
                             output_ext="rmd", verbose=config.verbose,
                             message="metadata")
    else:
        pass
def run(config, tim=None):
    """
    This method is where the data reduction process gets done.

    @param config: Object containing the data reduction configuration
                   information.
    @type config: L{hlr_utils.Configure}

    @param tim: (OPTIONAL) Object that will allow the method to perform
                           timing evaluations.
    @type tim: C{sns_time.DiffTime}
    """
    import common_lib
    import dr_lib
    import DST

    if tim is not None:
        tim.getTime(False)
        old_time = tim.getOldTime()

    if config.data is None:
        raise RuntimeError("Need to pass a data filename to the driver "\
                           +"script.")

    # Read in geometry if one is provided
    if config.inst_geom is not None:
        if config.verbose:
            print "Reading in instrument geometry file"

        inst_geom_dst = DST.getInstance("application/x-NxsGeom",
                                        config.inst_geom)
    else:
        inst_geom_dst = None

    # Perform early background subtraction if the hwfix flag is used
    if config.hwfix:
        if not config.mc:
            so_axis = "time_of_flight"
        else:
            so_axis = "Time_of_Flight"

        bkg_som0 = dr_lib.add_files(config.back,
                                    Data_Paths=config.data_paths.toPath(),
                                    SO_Axis=so_axis,
                                    Signal_ROI=config.roi_file,
                                    dataset_type="background",
                                    Verbose=config.verbose,
                                    Timer=tim)

        bkg_som = dr_lib.fix_bin_contents(bkg_som0)
        del bkg_som0
    else:
        bkg_som = None

    # Perform Steps 1-15 on sample data
    d_som1 = dr_lib.process_igs_data(config.data,
                                     config,
                                     timer=tim,
                                     inst_geom_dst=inst_geom_dst,
                                     tib_const=config.tib_data_const,
                                     bkg_som=bkg_som)

    # Perform Steps 1-15 on empty can data
    if config.ecan is not None:
        e_som1 = dr_lib.process_igs_data(config.ecan,
                                         config,
                                         timer=tim,
                                         inst_geom_dst=inst_geom_dst,
                                         dataset_type="empty_can",
                                         tib_const=config.tib_ecan_const,
                                         bkg_som=bkg_som)
    else:
        e_som1 = None

    # Perform Steps 1-15 on normalization data
    if config.norm is not None:
        n_som1 = dr_lib.process_igs_data(config.norm,
                                         config,
                                         timer=tim,
                                         inst_geom_dst=inst_geom_dst,
                                         dataset_type="normalization",
                                         tib_const=config.tib_norm_const,
                                         bkg_som=bkg_som)
    else:
        n_som1 = None

    # Perform Steps 1-15 on background data
    if config.back is not None:
        b_som1 = dr_lib.process_igs_data(config.back,
                                         config,
                                         timer=tim,
                                         inst_geom_dst=inst_geom_dst,
                                         dataset_type="background",
                                         tib_const=config.tib_back_const,
                                         bkg_som=bkg_som)
    else:
        b_som1 = None

    # Perform Step 1-15 on direct scattering background data
    if config.dsback is not None:
        ds_som1 = dr_lib.process_igs_data(config.dsback,
                                          config,
                                          timer=tim,
                                          inst_geom_dst=inst_geom_dst,
                                          tib_const=config.tib_dsback_const,
                                          dataset_type="dsbackground",
                                          bkg_som=bkg_som)

        # Note: time_zero_slope MUST be a tuple
        if config.time_zero_slope is not None:
            ds_som1.attr_list["Time_zero_slope"] = \
                                      config.time_zero_slope.toValErrTuple()

        # Note: time_zero_offset MUST be a tuple
        if config.time_zero_offset is not None:
            ds_som1.attr_list["Time_zero_offset"] = \
                                      config.time_zero_offset.toValErrTuple()

        # Step 16: Linearly interpolate TOF elastic range in direct scattering
        #          background data

        # First convert TOF elastic range to appropriate pixel initial
        # wavelengths
        if config.verbose:
            print "Determining initial wavelength range for elastic line"

        if tim is not None:
            tim.getTime(False)

        if config.tof_elastic is None:
            # Units are in microseconds
            tof_elastic_range = (140300, 141300)
        else:
            tof_elastic_range = config.tof_elastic

        ctof_elastic_low = dr_lib.convert_single_to_list(\
               "tof_to_initial_wavelength_igs_lin_time_zero",
               (tof_elastic_range[0], 0.0),
               ds_som1)

        ctof_elastic_high = dr_lib.convert_single_to_list(\
               "tof_to_initial_wavelength_igs_lin_time_zero",
               (tof_elastic_range[1], 0.0),
               ds_som1)

        ctof_elastic_range = [(ctof_elastic_low[i][0], ctof_elastic_high[i][0])
                              for i in xrange(len(ctof_elastic_low))]

        if tim is not None:
            tim.getTime(msg="After calculating initial wavelength range for "\
                        +"elastic line ")

        del ctof_elastic_low, ctof_elastic_high

        if config.split:
            lambda_filter = [(d_som1[i].axis[0].val[0],
                              d_som1[i].axis[0].val[-1])
                             for i in xrange(len(d_som1))]
        else:
            lambda_filter = None

        # Now interpolate spectra between TOF elastic range (converted to
        # initial wavelength)
        if config.verbose:
            print "Linearly interpolating direct scattering spectra"

        if tim is not None:
            tim.getTime(False)

        ds_som2 = dr_lib.lin_interpolate_spectra(ds_som1,
                                                 ctof_elastic_range,
                                                 filter_axis=lambda_filter)

        if tim is not None:
            tim.getTime(msg="After linearly interpolating direct scattering "\
                        +"spectra ")

        if config.dump_dslin:
            ds_som2_1 = dr_lib.sum_all_spectra(ds_som2,\
                                  rebin_axis=config.lambda_bins.toNessiList())

            hlr_utils.write_file(config.output,
                                 "text/Spec",
                                 ds_som2_1,
                                 output_ext="lin",
                                 data_ext=config.ext_replacement,
                                 path_replacement=config.path_replacement,
                                 verbose=config.verbose,
                                 message="dsbackground linear interpolation")
            del ds_som2_1

        del ds_som1
    else:
        ds_som2 = None

    if inst_geom_dst is not None:
        inst_geom_dst.release_resource()

    # Steps 17-18: Subtract background spectrum from sample spectrum
    if config.dsback is None:
        back_som = b_som1
        bkg_type = "background"
    else:
        back_som = ds_som2
        bkg_type = "dsbackground"
    d_som2 = dr_lib.subtract_bkg_from_data(d_som1,
                                           back_som,
                                           verbose=config.verbose,
                                           timer=tim,
                                           dataset1="data",
                                           dataset2=bkg_type,
                                           scale=config.scale_bs)

    if config.dsback is not None:
        del ds_som2

    # Step 19: Zero region outside TOF elastic for background for empty can
    if config.dsback is None:
        bcs_som = b_som1
        cs_som = e_som1
    else:
        if config.verbose and b_som1 is not None:
            print "Zeroing background spectra"

        if tim is not None and b_som1 is not None:
            tim.getTime(False)

        bcs_som = dr_lib.zero_spectra(b_som1, ctof_elastic_range)

        if tim is not None and b_som1 is not None:
            tim.getTime(msg="After zeroing background spectra")

        if config.verbose and e_som1 is not None:
            print "Zeroing empty can spectra"

        if tim is not None and e_som1 is not None:
            tim.getTime(False)

        cs_som = dr_lib.zero_spectra(e_som1, ctof_elastic_range)

        if tim is not None and e_som1 is not None:
            tim.getTime(msg="After zeroing empty can spectra")

        del ctof_elastic_range

    # Steps 20-21: Subtract background spectrum from empty can spectrum
    e_som2 = dr_lib.subtract_bkg_from_data(cs_som,
                                           bcs_som,
                                           verbose=config.verbose,
                                           timer=tim,
                                           dataset1="data-empty_can",
                                           dataset2="background",
                                           scale=config.scale_bcs)

    # Steps 22-23: Subtract background spectrum from empty can spectrum for
    #              normalization

    try:
        config.pre_norm
    except AttributeError:
        config.pre_norm = False

    if not config.pre_norm:
        e_som3 = dr_lib.subtract_bkg_from_data(e_som1,
                                               b_som1,
                                               verbose=config.verbose,
                                               timer=tim,
                                               dataset1="norm-empty_can",
                                               dataset2="background",
                                               scale=config.scale_bcn)
    else:
        e_som3 = None

    # Steps 24-25: Subtract background spectrum from normalization spectrum
    if not config.pre_norm:
        n_som2 = dr_lib.subtract_bkg_from_data(n_som1,
                                               b_som1,
                                               verbose=config.verbose,
                                               timer=tim,
                                               dataset1="normalization",
                                               dataset2="background",
                                               scale=config.scale_bn)
    else:
        n_som2 = n_som1

    del b_som1, e_som1, bcs_som, cs_som

    # Steps 26-27: Subtract empty can spectrum from sample spectrum
    d_som3 = dr_lib.subtract_bkg_from_data(d_som2,
                                           e_som2,
                                           verbose=config.verbose,
                                           timer=tim,
                                           dataset1="data",
                                           dataset2="empty_can",
                                           scale=config.scale_cs)

    del d_som2, e_som2

    # Steps 28-29: Subtract empty can spectrum from normalization spectrum
    if not config.pre_norm:
        n_som3 = dr_lib.subtract_bkg_from_data(n_som2,
                                               e_som3,
                                               verbose=config.verbose,
                                               timer=tim,
                                               dataset1="normalization",
                                               dataset2="empty_can",
                                               scale=config.scale_cn)
    else:
        n_som3 = n_som2

    del n_som2, e_som3

    # Step 30-31: Integrate normalization spectra
    if config.verbose and n_som3 is not None and not config.pre_norm:
        print "Integrating normalization spectra"

    if not config.pre_norm:
        norm_int = dr_lib.integrate_spectra(n_som3,
                                            start=config.norm_start,
                                            end=config.norm_end,
                                            norm=True)
    else:
        norm_int = n_som3

    del n_som3

    # Step 32: Normalize data by integrated values
    if config.verbose and norm_int is not None:
        print "Normalizing data by normalization data"

    if norm_int is not None:
        d_som4 = common_lib.div_ncerr(d_som3, norm_int)
    else:
        d_som4 = d_som3

    if norm_int is not None:
        if tim is not None:
            tim.getTime(msg="After normalizing data ")

    del d_som3, norm_int

    if config.dump_norm:
        if tim is not None:
            tim.getTime(False)

        hlr_utils.write_file(config.output,
                             "text/Spec",
                             d_som4,
                             output_ext="wvn",
                             data_ext=config.ext_replacement,
                             path_replacement=config.path_replacement,
                             verbose=config.verbose,
                             message="wavelength (vanadium norm) information")

        if tim is not None:
            tim.getTime(msg="After writing wavelength (vanadium norm) info ")

    # Steps 33 to end: Creating S(Q,E)
    if config.Q_bins is not None:
        if config.verbose:
            print "Creating 2D spectrum"

        if tim is not None:
            tim.getTime(False)

        d_som5 = dr_lib.create_E_vs_Q_igs(
            d_som4,
            config.E_bins.toNessiList(),
            config.Q_bins.toNessiList(),
            so_id="Full Detector",
            y_label="counts",
            y_units="counts / (ueV * A^-1)",
            x_labels=["Q transfer", "energy transfer"],
            x_units=["1/Angstroms", "ueV"],
            split=config.split,
            Q_filter=False,
            configure=config)
        if tim is not None:
            tim.getTime(msg="After creation of final spectrum ")

        del d_som4

    # Steps 33 to 36: Create S(-cos(polar), E)
    elif config.ncospol_bins is not None:
        if config.verbose:
            print "Convert wavelength to energy transfer"

        if tim is not None:
            tim.getTime(False)

        d_som4a = dr_lib.energy_transfer(d_som4,
                                         "IGS",
                                         "Wavelength_final",
                                         sa_norm=True,
                                         scale=True,
                                         change_units=True)

        if tim is not None:
            tim.getTime(msg="After wavelength to energy transfer conversion ")

        del d_som4

        if config.verbose:
            print "Creating 2D spectrum"

        if tim is not None:
            tim.getTime(False)

        d_som5 = dr_lib.create_param_vs_Y(
            d_som4a,
            "polar",
            "negcos_param_array",
            config.ncospol_bins.toNessiList(),
            rebin_axis=config.E_bins.toNessiList(),
            y_label="counts",
            y_units="counts / ueV",
            x_labels=["-cos(polar)", "Energy Transfer"],
            x_units=["", "ueV"])

        if tim is not None:
            tim.getTime(msg="After creation of final spectrum ")

    # If rescaling factor present, rescale the data
    if config.rescale_final is not None and not config.split:
        d_som6 = common_lib.mult_ncerr(d_som5, (config.rescale_final, 0.0))
    else:
        d_som6 = d_som5

    if tim is None:
        old_time = None

    if not __name__ == "amorphous_reduction_sqe":
        del d_som5
        __write_output(d_som6, config, tim, old_time)
    else:
        if config.create_output:
            del d_som5
            __write_output(d_som6, config, tim, old_time)
        else:
            return d_som6
Example #24
0
def run(config):
    """
    This method is where the data reduction process gets done.

    @param config: Object containing the data reduction configuration
                   information.
    @type config: L{hlr_utils.Configure}
    """
    import sys

    import dr_lib
    import DST

    try:
        data_dst = DST.getInstance("application/x-NeXus", config.data[0])
    except SystemError:
        print "ERROR: Failed to data read file %s" % config.data[0]
        sys.exit(-1)

    so_axis = "time_of_flight"

    if config.verbose:
        print "Reading data file"

    if config.roi_file is None:
        d_som0 = data_dst.getSOM(config.data_paths.toPath(), so_axis,
                                 start_id=config.starting_ids,
                                 end_id=config.ending_ids)
    else:
        d_som0 = data_dst.getSOM(config.data_paths.toPath(), so_axis,
                                 roi_file=config.roi_file)

    if config.width:
        d_som1 = dr_lib.fix_bin_contents(d_som0)
    else:
        d_som1 = d_som0

    del d_som0

    if config.dump_pxl:
        hlr_utils.write_file(config.data[0], "text/Spec", d_som1,
                             output_ext="tfp", verbose=config.verbose,
                             path_replacement=config.path_replacement,
                             message="pixel TOF information")
    else:
        pass

    if config.tib_const is not None:
        import common_lib
        d_som2 = common_lib.sub_ncerr(d_som1, config.tib_const.toValErrTuple())

        if config.dump_sxl:
            hlr_utils.write_file(config.data[0], "text/Spec", d_som2,
                                 output_ext="tsp", verbose=config.verbose,
                                 path_replacement=config.path_replacement,
                                 message="TIB const sub pixel TOF information")
        
    else:
        d_som2 = d_som1

    del d_som1

    if len(d_som2) == 1:
        if config.verbose:
            print "Summing 1 spectrum."        
        d_som3 = d_som2
    else:
        if config.verbose:
            print "Summing %d spectra." % len(d_som2)
        d_som3 = dr_lib.sum_all_spectra(d_som2)
        d_som3[0].id = d_som2[0].id

    del d_som2

    hlr_utils.write_file(config.output, "text/Spec", d_som3, replace_ext=False,
                         verbose=config.verbose,
                         path_replacement=config.path_replacement,
                         message="combined TOF information")
Example #25
0
# warranty, express or implied, or assumes any legal liability or
# responsibility for the accuracy, completeness, or usefulness of any
# information, apparatus, product, or process disclosed, or represents that
# its use would not infringe privately owned rights.
#

# $Id$

import sys
import DST

# Diagnostic script: read a DAVE 2D file given on the command line and
# print its axis/attribute metadata plus a sample channel index.
filename = sys.argv[1]

ifile = open(filename, "r")

d2d = DST.Dave2dDST(ifile)

som = d2d.getSOM()

d2d.release_resource()
# Hard-coded sample coordinates used to demonstrate the flattened
# channel-index calculation below.
x_index = 1
y_index = 31
# Number of values along the second (y) axis of the first spectrum
N_y = len(som[0].axis[1].val)
print "N_y:", N_y
# Flattened (row-major) channel index for (x_index, y_index)
channel = y_index + (x_index * N_y)
print "channel:", channel
print som.attr_list
print "Y Label:", som.getYLabel()
print "Y Units:", som.getYUnits()
print "X Labels:", som.getAllAxisLabels()
print "X Units:", som.getAllAxisUnits()
Example #26
0
def run(config, tim=None):
    """
    This method is where the data reduction process gets done.

    @param config: Object containing the data reduction configuration
                   information.
    @type config: L{hlr_utils.Configure}

    @param tim: (OPTIONAL) Object that will allow the method to perform
                           timing evaluations.
    @type tim: C{sns_time.DiffTime}
    """
    import dr_lib
    import DST
    
    if tim is not None:
        tim.getTime(False)
        old_time = tim.getOldTime()

    if config.data is None:
        raise RuntimeError("Need to pass a data filename to the driver "\
                           +"script.")

    # Read in geometry if one is provided
    if config.inst_geom is not None:
        if config.verbose:
            print "Reading in instrument geometry file"
            
        inst_geom_dst = DST.getInstance("application/x-NxsGeom",
                                        config.inst_geom)
    else:
        inst_geom_dst = None

    # Perform Steps 1-11 on sample data
    d_som1 = dr_lib.process_sas_data(config.data, config, timer=tim,
                                     inst_geom_dst=inst_geom_dst,
                                     bkg_subtract=config.bkg_coeff,
                     acc_down_time=config.data_acc_down_time.toValErrTuple(),
                                     bkg_scale=config.bkg_scale,
                                     trans_data=config.data_trans)

    # Perform Steps 1-11 on buffer/solvent only data
    if config.solv is not None:
        s_som1 = dr_lib.process_sas_data(config.solv, config, timer=tim,
                                         inst_geom_dst=inst_geom_dst,
                                         dataset_type="solvent",
                                         bkg_subtract=config.bkg_coeff,
                     acc_down_time=config.solv_acc_down_time.toValErrTuple(),
                                         bkg_scale=config.bkg_scale,
                                         trans_data=config.solv_trans)
    else:
        s_som1 = None

    # Step 12: Subtract buffer/solvent only spectrum from sample spectrum
    d_som2 = dr_lib.subtract_bkg_from_data(d_som1, s_som1,
                                           verbose=config.verbose,
                                           timer=tim,
                                           dataset1="data",
                                           dataset2="solvent")
    
    del s_som1, d_som1

    # Perform Steps 1-11 on empty-can data
    if config.ecan is not None:
        e_som1 = dr_lib.process_sas_data(config.ecan, config, timer=tim,
                                         inst_geom_dst=inst_geom_dst,
                                         dataset_type="empty_can",
                                         bkg_subtract=config.bkg_coeff,
                     acc_down_time=config.ecan_acc_down_time.toValErrTuple(),
                                         bkg_scale=config.bkg_scale,
                                         trans_data=config.ecan_trans)
    else:
        e_som1 = None

    # Step 13: Subtract empty-can spectrum from sample spectrum
    d_som3 = dr_lib.subtract_bkg_from_data(d_som2, e_som1,
                                           verbose=config.verbose,
                                           timer=tim,
                                           dataset1="data",
                                           dataset2="empty_can")
    
    del e_som1, d_som2

    # Perform Steps 1-11 on open beam data
    if config.open is not None:
        o_som1 = dr_lib.process_sas_data(config.open, config, timer=tim,
                                         inst_geom_dst=inst_geom_dst,
                                         dataset_type="open_beam",
                                         bkg_subtract=config.bkg_coeff,
                     acc_down_time=config.open_acc_down_time.toValErrTuple(),
                                         bkg_scale=config.bkg_scale)
    else:
        o_som1 = None
        
    # Step 14: Subtract open beam spectrum from sample spectrum
    d_som4 = dr_lib.subtract_bkg_from_data(d_som3, o_som1,
                                           verbose=config.verbose,
                                           timer=tim,
                                           dataset1="data",
                                           dataset2="open_beam")
    
    del o_som1, d_som3

    # Perform Steps 1-11 on dark current data
    if config.dkcur is not None:
        dc_som1 = dr_lib.process_sas_data(config.open, config, timer=tim,
                                          inst_geom_dst=inst_geom_dst,
                                          dataset_type="dark_current",
                                          bkg_subtract=config.bkg_coeff)
    else:
        dc_som1 = None
        
    # Step 15: Subtract dark current spectrum from sample spectrum
    d_som5 = dr_lib.subtract_bkg_from_data(d_som4, dc_som1,
                                           verbose=config.verbose,
                                           timer=tim,
                                           dataset1="data",
                                           dataset2="dark_current")
    
    del dc_som1, d_som4    

    # Create 2D distributions is necessary
    if config.dump_Q_r:
        d_som5_1 = dr_lib.create_param_vs_Y(d_som5, "radius", "param_array",
                                       config.r_bins.toNessiList(),
                                       rebin_axis=config.Q_bins.toNessiList(),
                                       binnorm=True,
                                       y_label="S",
                                       y_units="Counts / A^-1 m",
                                       x_labels=["Radius", "Q"],
                                       x_units=["m", "1/Angstroms"])

        hlr_utils.write_file(config.output, "text/Dave2d", d_som5_1,
                             output_ext="qvr", verbose=config.verbose,
                             data_ext=config.ext_replacement,
                             path_replacement=config.path_replacement,
                             message="S(r, Q) information")

        del d_som5_1
        
    if config.dump_Q_theta:
        d_som5_1 = dr_lib.create_param_vs_Y(d_som5, "polar", "param_array",
                                       config.theta_bins.toNessiList(),
                                       rebin_axis=config.Q_bins.toNessiList(),
                                       binnorm=True,
                                       y_label="S",
                                       y_units="Counts / A^-1 rads",
                                       x_labels=["Polar Angle", "Q"],
                                       x_units=["rads", "1/Angstroms"])

        hlr_utils.write_file(config.output, "text/Dave2d", d_som5_1,
                             output_ext="qvt", verbose=config.verbose,
                             data_ext=config.ext_replacement,
                             path_replacement=config.path_replacement,
                             message="S(theta, Q) information")

        del d_som5_1
        
    # Steps 16 and 17: Rebin and sum all spectra
    if config.verbose:
        print "Rebinning and summing for final spectrum"
            
    if tim is not None:
        tim.getTime(False)

    if config.dump_frac_rebin:
        set_conf = config
    else:
        set_conf = None

    d_som6 = dr_lib.sum_by_rebin_frac(d_som5, config.Q_bins.toNessiList(),
                                      configure=set_conf)

    if tim is not None:
        tim.getTime(msg="After rebinning and summing for spectrum")    

    del d_som5

    if config.facility == "LENS":
        # Step 18: Scale final spectrum by Q bin centers
        if config.verbose:
            print "Scaling final spectrum by Q centers"
        
        if tim is not None:
            tim.getTime(False)

        d_som7 = dr_lib.fix_bin_contents(d_som6, scale=True, width=True,
                                         units="1/Angstroms")

        if tim is not None:
            tim.getTime(msg="After scaling final spectrum")    
    else:
        d_som7 = d_som6

    del d_som6

    # If rescaling factor present, rescale the data
    if config.rescale_final is not None:
        import common_lib
        d_som8 = common_lib.mult_ncerr(d_som7, (config.rescale_final, 0.0))
    else:
        d_som8 = d_som7

    del d_som7
    
    hlr_utils.write_file(config.output, "text/Spec", d_som8,
                         verbose=config.verbose,
                         replace_path=False,
                         replace_ext=False,
                         message="combined S(Q) information")

    # Create 1D canSAS file
    hlr_utils.write_file(config.output, "text/canSAS", d_som8,
                         verbose=config.verbose,
                         output_ext="xml",
                         data_ext=config.ext_replacement,         
                         path_replacement=config.path_replacement,
                         message="combined S(Q) information")
    
    d_som8.attr_list["config"] = config

    hlr_utils.write_file(config.output, "text/rmd", d_som8,
                         output_ext="rmd",
                         data_ext=config.ext_replacement,         
                         path_replacement=config.path_replacement,
                         verbose=config.verbose,
                         message="metadata")

    if tim is not None:
        tim.setOldTime(old_time)
        tim.getTime(msg="Total Running Time")
Example #27
0
def run(config, tim):
    """
    This method is where the data reduction process gets done.

    @param config: Object containing the data reduction configuration
                   information.
    @type config: L{hlr_utils.Configure}

    @param tim: Object that will allow the method to perform timing
                evaluations.
    @type tim: C{sns_time.DiffTime}
    """
    # NOTE: dr_lib, common_lib and hlr_utils are used below but not
    # imported here; they are assumed to be imported at module level.
    # NOTE(review): math and axis_manip are imported but not referenced in
    # this function body — confirm whether they are still needed.
    import DST
    import math
    if config.inst == "REF_M":
        import axis_manip
        import utils

    if tim is not None:
        tim.getTime(False)
        old_time = tim.getOldTime()

    if config.data is None:
        raise RuntimeError("Need to pass a data filename to the driver "\
                           +"script.")

    # Read in sample data geometry if one is provided
    if config.data_inst_geom is not None:
        if config.verbose:
            print "Reading in sample data instrument geometry file"

        data_inst_geom_dst = DST.getInstance("application/x-NxsGeom",
                                             config.data_inst_geom)
    else:
        data_inst_geom_dst = None

    # Read in normalization data geometry if one is provided
    if config.norm_inst_geom is not None:
        if config.verbose:
            print "Reading in normalization instrument geometry file"

        norm_inst_geom_dst = DST.getInstance("application/x-NxsGeom",
                                             config.norm_inst_geom)
    else:
        norm_inst_geom_dst = None

    # Perform Steps 1-6 on sample data
    d_som1 = dr_lib.process_ref_data(config.data,
                                     config,
                                     config.data_roi_file,
                                     config.dbkg_roi_file,
                                     config.no_bkg,
                                     tof_cuts=config.tof_cuts,
                                     inst_geom_dst=data_inst_geom_dst,
                                     no_tof_cuts=True,
                                     timer=tim)

    # Perform Steps 1-6 on normalization data
    if config.norm is not None:
        n_som1 = dr_lib.process_ref_data(config.norm,
                                         config,
                                         config.norm_roi_file,
                                         config.nbkg_roi_file,
                                         config.no_norm_bkg,
                                         dataset_type="norm",
                                         tof_cuts=config.tof_cuts,
                                         inst_geom_dst=norm_inst_geom_dst,
                                         no_tof_cuts=True,
                                         timer=tim)
    else:
        n_som1 = None

    # Keep a copy of the TOF axis; deepcopy because d_som1 is mutated later
    # NOTE(review): tof_axis is never read again in this function — confirm
    # it is not dead code.
    if config.Q_bins is None and config.scatt_angle is not None:
        import copy
        tof_axis = copy.deepcopy(d_som1[0].axis[0].val)

    # Closing sample data instrument geometry file
    if data_inst_geom_dst is not None:
        data_inst_geom_dst.release_resource()

    # Closing normalization data instrument geometry file
    if norm_inst_geom_dst is not None:
        norm_inst_geom_dst.release_resource()

    # Step 7: Sum all normalization spectra together
    if config.norm is not None:
        n_som2 = dr_lib.sum_all_spectra(n_som1)
    else:
        n_som2 = None

    del n_som1

    # Step 8: Divide data by normalization
    if config.verbose and config.norm is not None:
        print "Scale data by normalization"

    if config.norm is not None:
        d_som2 = common_lib.div_ncerr(d_som1, n_som2, length_one_som=True)
    else:
        d_som2 = d_som1

    if tim is not None and config.norm is not None:
        tim.getTime(msg="After normalizing signal spectra")

    del d_som1, n_som2

    # Optional dump of the combined R(TOF) spectrum
    if config.dump_rtof_comb:
        d_som2_1 = dr_lib.sum_all_spectra(d_som2)
        d_som2_2 = dr_lib.data_filter(d_som2_1)
        del d_som2_1

        # REF_M data is written as a density, so convert the TOF axis from
        # bin boundaries to bin centers first
        if config.inst == "REF_M":
            tof_bc = utils.calc_bin_centers(d_som2_2[0].axis[0].val)
            d_som2_2[0].axis[0].val = tof_bc[0]
            d_som2_2.setDataSetType("density")

        d_som2_3 = dr_lib.cut_spectra(d_som2_2, config.tof_cut_min,
                                      config.tof_cut_max)
        del d_som2_2

        hlr_utils.write_file(config.output,
                             "text/Spec",
                             d_som2_3,
                             output_ext="crtof",
                             verbose=config.verbose,
                             data_ext=config.ext_replacement,
                             path_replacement=config.path_replacement,
                             message="combined R(TOF) information")
        del d_som2_3

    # Optional dump of the per-pixel R(TOF) spectra
    if config.dump_rtof:
        if config.inst == "REF_M":
            d_som2_1 = d_som2
        else:
            d_som2_1 = dr_lib.filter_ref_data(d_som2)

        d_som2_2 = dr_lib.cut_spectra(d_som2_1, config.tof_cut_min,
                                      config.tof_cut_max)
        del d_som2_1
        hlr_utils.write_file(config.output,
                             "text/Spec",
                             d_som2_2,
                             output_ext="rtof",
                             verbose=config.verbose,
                             data_ext=config.ext_replacement,
                             path_replacement=config.path_replacement,
                             message="R(TOF) information")
        del d_som2_2

    # Step 9: Convert TOF to scalar Q
    if config.verbose:
        print "Converting TOF to scalar Q"
        if config.beamdiv_corr:
            print "Applying beam divergence correction"

    # Check to see if polar angle offset is necessary
    if config.angle_offset is not None:
        # Check on units, offset must be in radians
        p_offset = hlr_utils.angle_to_radians(config.angle_offset)
        d_som2.attr_list["angle_offset"] = config.angle_offset
    else:
        p_offset = None

    # Check to see if scattering angle is requested
    if config.scatt_angle is not None:
        # Mainly used by REF_M; the polar angle is half the scattering angle
        scatt_angle = hlr_utils.angle_to_radians(config.scatt_angle)
        scatt_angle = (scatt_angle[0] / 2.0, scatt_angle[1])
    else:
        scatt_angle = None

    if tim is not None:
        tim.getTime(False)

    d_som3 = dr_lib.tof_to_ref_scalar_Q(d_som2,
                                        units="microsecond",
                                        angle_offset=p_offset,
                                        lojac=False,
                                        polar=scatt_angle,
                                        configure=config)

    del d_som2

    if tim is not None:
        tim.getTime(msg="After converting wavelength to scalar Q ")

    # Calculate the Q cut range from the TOF cuts range
    if scatt_angle is not None:
        # NOTE(review): scatt_angle was already halved above, so this halves
        # the configured angle a second time (total factor of 4) — confirm
        # this is intentional.
        polar_angle = (scatt_angle[0] / 2.0, scatt_angle[1])
    else:
        polar_angle = (d_som3.attr_list["data-theta"][0], 0)

    if p_offset is not None:
        polar_angle = (polar_angle[0] + p_offset[0],
                       polar_angle[1] + p_offset[1])

    # Total flight path used for the single-value Q conversions below
    pl = d_som3.attr_list.instrument.get_total_path(det_secondary=True)
    # Since Q ~ 1/T, need to reverse cut designation
    if config.tof_cut_min is not None:
        Q_cut_max = dr_lib.tof_to_ref_scalar_Q(
            (float(config.tof_cut_min), 0.0), pathlength=pl,
            polar=polar_angle)[0]
    else:
        Q_cut_max = None

    if config.tof_cut_max is not None:
        Q_cut_min = dr_lib.tof_to_ref_scalar_Q(
            (float(config.tof_cut_max), 0.0), pathlength=pl,
            polar=polar_angle)[0]
    else:
        Q_cut_min = None

    # Optional dump of the per-pixel R(Q) spectra
    if config.dump_rq:
        d_som3_1 = dr_lib.data_filter(d_som3, clean_axis=True)
        d_som3_2 = dr_lib.cut_spectra(d_som3_1, Q_cut_min, Q_cut_max)
        del d_som3_1
        hlr_utils.write_file(config.output,
                             "text/Spec",
                             d_som3_2,
                             output_ext="rq",
                             verbose=config.verbose,
                             data_ext=config.ext_replacement,
                             path_replacement=config.path_replacement,
                             message="pixel R(Q) information")
        del d_som3_2

    # Rebin onto the requested Q axis (always done for beam divergence
    # correction)
    if config.Q_bins is not None or config.beamdiv_corr:
        if config.verbose:
            print "Rebinning data"
        d_som4 = common_lib.rebin_axis_1D_frac(d_som3,
                                               config.Q_bins.toNessiList())

        if config.dump_rqr:
            d_som4_1 = dr_lib.data_filter(d_som4, clean_axis=True)
            d_som4_2 = dr_lib.cut_spectra(d_som4_1, Q_cut_min, Q_cut_max)
            del d_som4_1
            hlr_utils.write_file(config.output,
                                 "text/Spec",
                                 d_som4_2,
                                 output_ext="rqr",
                                 verbose=config.verbose,
                                 data_ext=config.ext_replacement,
                                 path_replacement=config.path_replacement,
                                 message="rebinned pixel R(Q) information")
            del d_som4_2
    else:
        d_som4 = d_som3

    del d_som3

    # Optional final filtering pass
    if not config.no_filter:
        if config.verbose:
            print "Filtering final data"

        if tim is not None:
            tim.getTime(False)

        d_som5 = dr_lib.data_filter(d_som4)

        if tim is not None:
            tim.getTime(msg="After filtering data")
    else:
        d_som5 = d_som4

    del d_som4

    # Sum all spectra since everything is on same axis
    d_som6 = dr_lib.sum_all_spectra(d_som5)

    del d_som5

    # Apply the Q cuts (derived above from the TOF cuts) to the final
    # combined spectrum
    d_som7 = dr_lib.cut_spectra(d_som6,
                                Q_cut_min,
                                Q_cut_max,
                                num_bins_clean=config.num_bins_clean)

    del d_som6

    hlr_utils.write_file(config.output,
                         "text/Spec",
                         d_som7,
                         replace_ext=False,
                         replace_path=False,
                         verbose=config.verbose,
                         message="combined Reflectivity information")

    # Attach the configuration so it ends up in the metadata file
    d_som7.attr_list["config"] = config

    hlr_utils.write_file(config.output,
                         "text/rmd",
                         d_som7,
                         output_ext="rmd",
                         verbose=config.verbose,
                         data_ext=config.ext_replacement,
                         path_replacement=config.path_replacement,
                         message="metadata")

    if tim is not None:
        tim.setOldTime(old_time)
        tim.getTime(msg="Total Running Time")
Example #28
0
        SO1.id = i
        SO1.axis[0].val.append(j+1)
        if options.withXvar:
            SO1.axis[0].var.append(j+1)
        SO1.y.append(1000+j+(20*j))
        SO1.var_y.append(100+j)
        if options.extraSom:
            SO2.id = i
            SO2.axis[0].val.append((j+1)*1.0579838)
            SO2.y.append(1000+j+(20*j))
            SO2.var_y.append(100+j)
        
    SO1.axis[0].val.append(11)
    if options.withXvar:
        SO1.axis[0].var.append(11)

    if options.extraSom:
        SO2.axis[0].val.append(11*1.0579838)
    
    SOM1.append(SO1)
    if options.extraSom:
        SOM2.append(SO2)

# Write the SOM(s) built above out in ASCII 3-column format.
# NOTE(review): the name `file` shadows the Python 2 builtin — consider
# renaming (left unchanged here since surrounding script state is not
# fully visible).
file = open(filename_SOM1, "w")

a3c = DST.Ascii3ColDST(file)
# extra_som is None unless options.extraSom populated SOM2 above
a3c.writeSOM(SOM1, extra_som=SOM2)
a3c.release_resource()


Example #29
0
# to reproduce, prepare derivative works, and distribute copies to the public
# for any purpose and without fee.
#
# This material was prepared as an account of work sponsored by an agency of
# the United States Government.  Neither the United States Government nor the
# United States Department of Energy, nor any of their employees, makes any
# warranty, express or implied, or assumes any legal liability or
# responsibility for the accuracy, completeness, or usefulness of any
# information, apparatus, product, or process disclosed, or represents that
# its use would not infringe privately owned rights.
#

# $Id$

import DST
from SOM import SOM
import hlr_utils

# Test script: write a minimal SOM carrying a Configure object out to an
# XML metadata file via the MdwDST writer.
filename = "test.xml"

# FIX: the original rebound the name SOM to the instance (`SOM = SOM()`),
# shadowing the imported SOM class; use a distinct instance name instead.
som = SOM()
conf = hlr_utils.Configure()
conf.verbose = True
som.attr_list["config"] = conf

ofile = open(filename, "w")

mdw = DST.MdwDST(ofile)
mdw.writeSOM(som)
mdw.release_resource()
Example #30
0
def run(config, tim=None):
    """
    This method is where the data reduction process gets done.

    @param config: Object containing the data reduction configuration
                   information.
    @type config: L{hlr_utils.Configure}

    @param tim: (OPTIONAL) Object that will allow the method to perform
                           timing evaluations.
    @type tim: C{sns_time.DiffTime}
    """
    import common_lib
    import dr_lib
    import DST
    
    if tim is not None:
        tim.getTime(False)
        old_time = tim.getOldTime()

    if config.data is None:
        raise RuntimeError("Need to pass a data filename to the driver "\
                           +"script.")

    # Read in geometry if one is provided
    if config.inst_geom is not None:
        if config.verbose:
            print "Reading in instrument geometry file"
            
        inst_geom_dst = DST.getInstance("application/x-NxsGeom",
                                        config.inst_geom)
    else:
        inst_geom_dst = None

    only_background = False
    data_type = "transmission"
        
    # Perform Steps 1,6-7 or 1,3,5-7 on sample data
    d_som1 = dr_lib.process_sas_data(config.data, config, timer=tim,
                                     inst_geom_dst=inst_geom_dst,
                                     dataset_type=data_type,
                                     transmission=True,
                                     get_background=only_background)

    # Perform Steps 1,6-7 on background data
    if config.back is not None:
        b_som1 = dr_lib.process_sas_data(config.back, config, timer=tim,
                                         inst_geom_dst=inst_geom_dst,
                                         dataset_type="trans_bkg",
                                         transmission=True)
    else:
        b_som1 = None

    # Put the datasets on the same axis
    d_som2 = dr_lib.sum_by_rebin_frac(d_som1, config.lambda_bins.toNessiList())
    del d_som1

    if b_som1 is not None:
        b_som2 = dr_lib.sum_by_rebin_frac(b_som1,
                                          config.lambda_bins.toNessiList())
    else:
        b_som2 = None
        
    del b_som1    
    
    # Divide the data spectrum by the background spectrum
    if b_som2 is not None:
        d_som3 = common_lib.div_ncerr(d_som2, b_som2)
    else:
        d_som3 = d_som2

    del d_som2, b_som2

    # Reset y units to dimensionless for the tranmission due to ratio
    if config.back is not None:
        d_som3.setYLabel("Ratio")
        d_som3.setYUnits("")
        write_message = "transmission spectrum"
    else:
        write_message = "spectrum for background estimation"

    # Write out the transmission spectrum
    hlr_utils.write_file(config.output, "text/Spec", d_som3,
                         verbose=config.verbose,
                         replace_path=False,
                         replace_ext=False,
                         message=write_message)

    d_som3.attr_list["config"] = config

    hlr_utils.write_file(config.output, "text/rmd", d_som3,
                         output_ext="rmd",
                         data_ext=config.ext_replacement,         
                         path_replacement=config.path_replacement,
                         verbose=config.verbose,
                         message="metadata")

    if tim is not None:
        tim.setOldTime(old_time)
        tim.getTime(msg="Total Running Time")    
Example #31
0
DSample = Sample()
DSample.name = "Test Sample"
DSample.nature = "K3NO+"
SOM1.attr_list.sample = DSample

DInst = Instrument(instrument="SANS",
                   primary=(15.0, 0.0),
                   det_secondary=(2.0, 0.0),
                   x_pix_offset=[
                       (),
                       (),
                   ])
SOM1.attr_list.instrument = DInst

length = 10
SO1 = SO(construct=True, withXVar=True)
for i in range(length):
    SO1.axis[0].val.append(float(i))
    SO1.y.append(float(100 * i))
    SO1.var_y.append(float(100 * i))

SO1.axis[0].val.append(length)

SOM1.append(SO1)

ifile = open("test_cansas1d.xml", "w")

cs1d = DST.CanSas1dDST(ifile)
cs1d.writeSOM(SOM1)
cs1d.release_resource()
Example #32
0
def run(config, tim=None):
    """
    This method is where the data reduction process gets done.

    @param config: Object containing the data reduction configuration
                   information.
    @type config: L{hlr_utils.Configure}

    @param tim: (OPTIONAL) Object that will allow the method to perform
                           timing evaluations.
    @type tim: C{sns_time.DiffTime}
    """
    import common_lib
    import dr_lib
    import DST
    
    if tim is not None:
        tim.getTime(False)
        old_time = tim.getOldTime()

    if config.data is None:
        raise RuntimeError("Need to pass a data filename to the driver "\
                           +"script.")

    # Read in geometry if one is provided
    if config.inst_geom is not None:
        if config.verbose:
            print "Reading in instrument geometry file"
            
        inst_geom_dst = DST.getInstance("application/x-NxsGeom",
                                        config.inst_geom)
    else:
        inst_geom_dst = None

    config.so_axis = "time_of_flight"

    try:
        if type(config.mask_file) == type([]):
            if len(config.mask_file) > 1:
                if config.verbose:
                    print "Creating combined mask file"

                if tim is not None:
                    tim.getTime(False)
        
                config.mask_file = hlr_utils.merge_roi_files(\
                        config.mask_file,
                        config)

                if tim is not None:
                    tim.getTime(msg="After creating combined mask file")
            else:
                config.mask_file = config.mask_file[0]
        else:
            # Do nothing since it's already a string
            pass
    except TypeError:
        # No mask files provided, do nothing
        pass

    # Steps 1-3: Produce a scaled summed dark current dataset
    dc_som = dr_lib.scaled_summed_data(config.dkcur, config,
                                       dataset_type="dark_current",
                                       timer=tim)

    # Perform Steps 3-6 on black can data
    if config.bcan is not None:
        b_som1 = dr_lib.calibrate_dgs_data(config.bcan, config, dc_som,
                                           dataset_type="black_can",
                                           inst_geom_dst=inst_geom_dst,
                                           tib_const=config.tib_const,
                                           cwp=config.cwp_bcan,
                                           timer=tim)
    else:
        b_som1 = None

    # Perform Steps 3-6 on empty can data    
    if config.ecan is not None:
        e_som1 = dr_lib.calibrate_dgs_data(config.ecan, config, dc_som,
                                           dataset_type="empty_can",
                                           inst_geom_dst=inst_geom_dst,
                                           tib_const=config.tib_const,
                                           cwp=config.cwp_ecan,
                                           timer=tim)
    else:
        e_som1 = None

    # Perform Steps 3-6 on sample data
    d_som1 = dr_lib.calibrate_dgs_data(config.data, config, dc_som,
                                       inst_geom_dst=inst_geom_dst,
                                       tib_const=config.tib_const,
                                       cwp=config.cwp_data,
                                       timer=tim)

    # Perform Steps 7-16 on sample data
    if config.data_trans_coeff is None:
        data_trans_coeff = None
    else:
        data_trans_coeff = config.data_trans_coeff.toValErrTuple()

    # Determine if we need to rebin the empty or black can data
    if config.ecan is not None and e_som1 is not None:
        ecan_cwp = True
    else:
        ecan_cwp = False

    if config.bcan is not None and b_som1 is not None:
        bcan_cwp = True
    else:
        bcan_cwp = False        

    cwp_used = ecan_cwp or bcan_cwp
    
    d_som2 = dr_lib.process_dgs_data(d_som1, config, b_som1, e_som1,
                                     data_trans_coeff, cwp_used=cwp_used,
                                     timer=tim)

    del d_som1

    del b_som1, e_som1

    # Step 18: Normalize sample data by integrated values
    if config.norm is not None:
        if config.verbose:
            print "Reading normalization information"

        norm_int = dr_lib.add_files(config.norm, Signal_ROI=config.roi_file,
                                    Signal_MASK=config.mask_file,
                                    dataset_type="normalization",
                                    dst_type="text/num-info",
                                    Verbose=config.verbose,
                                    Timer=tim)
        
        # Make the labels and units compatible with a NeXus file based SOM
        norm_int.setAllAxisLabels(["wavelength"])
        norm_int.setAllAxisUnits(["Angstroms"])
        norm_int.setYLabel("Intensity")
        norm_int.setYUnits("Counts/Angstroms")
        
        if config.verbose:
            print "Normalizing data by normalization data"

        if tim is not None:
            tim.getTime(False)

        d_som3 = common_lib.div_ncerr(d_som2, norm_int)            

        if tim is not None:
            tim.getTime(msg="After normalizing data ")

        del norm_int
    else:
        d_som3 = d_som2

    del d_som2

    # Step 19: Calculate the initial energy
    if config.initial_energy is not None:
        d_som3.attr_list["Initial_Energy"] = config.initial_energy

    # Steps 20-21: Calculate the energy transfer
    if config.verbose:
        print "Calculating energy transfer"

    if tim is not None:
        tim.getTime(False)

    d_som4 = dr_lib.energy_transfer(d_som3, "DGS", "Initial_Energy",
                                    lojac=True, scale=config.lambda_ratio)
    
    if tim is not None:
        tim.getTime(msg="After calculating energy transfer ")

    del d_som3

    # Step 22: Rebin energy transfer spectra
    if config.verbose:
        print "Rebinning to final energy transfer axis"

    if tim is not None:
        tim.getTime(False)
        
    d_som5 = common_lib.rebin_axis_1D_frac(d_som4, config.E_bins.toNessiList())

    if tim is not None:
        tim.getTime(msg="After rebinning energy transfer ")

    del d_som4

    if config.dump_et_comb:
        d_som5_1 = dr_lib.sum_all_spectra(d_som5)
        hlr_utils.write_file(config.output, "text/Spec", d_som5_1,
                             output_ext="et",
                             data_ext=config.ext_replacement,    
                             path_replacement=config.path_replacement,
                             verbose=config.verbose,
                             message="combined energy transfer information")

        del d_som5_1

    # Get the corner geometry information
    if config.verbose:
        print "Reading in corner geometry information"
        
    if tim is not None:
        tim.getTime(False)
        
    corner_angles = hlr_utils.get_corner_geometry(config.corner_geom)

    if tim is not None:
        tim.getTime(msg="After reading in corner geometry information ")

    if config.make_spe:
        d_som5.attr_list["corner_geom"] = corner_angles

        hlr_utils.write_file(config.output, "text/PHX", d_som5,
                             output_ext="phx",
                             data_ext=config.ext_replacement,    
                             path_replacement=config.path_replacement,
                             verbose=config.verbose,
                             message="PHX information")

        hlr_utils.write_file(config.output, "text/PAR", d_som5,
                             output_ext="par",
                             data_ext=config.ext_replacement,    
                             path_replacement=config.path_replacement,
                             verbose=config.verbose,
                             message="PAR information")

        hlr_utils.write_file(config.output, "text/SPE", d_som5,
                             output_ext="spe",
                             data_ext=config.ext_replacement,    
                             path_replacement=config.path_replacement,
                             verbose=config.verbose,
                             message="SPE information")

    # Steps 23-34: Create S(Q, E) distribution
    if config.verbose:
        print "Creating S(Q, E)"

    if tim is not None:
        tim.getTime(False)
        
    d_som5_2 = dr_lib.create_E_vs_Q_dgs(d_som5,
                                        config.initial_energy.toValErrTuple(),
                                        config.Q_bins.toNessiList(),
                                        corner_angles=corner_angles,
                                        split=config.split,
                                        configure=config,
                                        timer=tim)

    # Writing 2D DAVE file
    if not config.split:
        hlr_utils.write_file(config.output, "text/Dave2d", d_som5_2,
                             output_ext="sqe",
                             data_ext=config.ext_replacement,    
                             path_replacement=config.path_replacement,
                             verbose=config.verbose,
                             message="S(Q,E)")

        hlr_utils.write_file(config.output, "application/x-RedNxs", d_som5_2,
                             output_ext="nxs",
                             data_ext=config.ext_replacement,    
                             path_replacement=config.path_replacement,
                             verbose=config.verbose,
                             extra_tag="sqe",
                             getsom_kwargs={"entry_name": "sqe"},
                             message="NeXus S(Q,E)")
                                               
    if tim is not None:
        tim.getTime(msg="After calculating S(Q,E) spectrum ")    

    del d_som5_2

    if config.qmesh:
        # Steps 23-27,35-36: Create S(Qvec, E) distribution
        if config.verbose:
            print "Creating S(Qvec, E)"

        if tim is not None:
            tim.getTime(False)
        
        dr_lib.create_Qvec_vs_E_dgs(d_som5,
                                    config.initial_energy.toValErrTuple(),
                                    config, corner_angles=corner_angles,
                                    make_fixed=config.fixed,
                                    output=config.output,
                                    timer=tim)
        
        if tim is not None:
            tim.getTime(msg="After calculating final spectrum ")    

    # Write out RMD file
    d_som5.attr_list["config"] = config

    hlr_utils.write_file(config.output, "text/rmd", d_som5,
                         output_ext="rmd",
                         data_ext=config.ext_replacement,         
                         path_replacement=config.path_replacement,
                         verbose=config.verbose,
                         message="metadata")
    
    if tim is not None:
        tim.setOldTime(old_time)
        tim.getTime(msg="Total Running Time")
Example #33
0
def run(config, tim):
    """
    This method is where the data reduction process gets done.

    @param config: Object containing the data reduction configuration
                   information.
    @type config: L{hlr_utils.Configure}

    @param tim: Object that will allow the method to perform timing
                evaluations.
    @type tim: C{sns_time.DiffTime}
    """
    import array_manip
    import common_lib
    import dr_lib
    import DST
    import SOM

    import math

    if tim is not None:
        tim.getTime(False)
        old_time = tim.getOldTime()

    if config.data is None:
        raise RuntimeError("Need to pass a data filename to the driver "\
                           +"script.")

    # Read in sample data geometry if one is provided
    if config.data_inst_geom is not None:
        if config.verbose:
            print "Reading in sample data instrument geometry file"

        data_inst_geom_dst = DST.getInstance("application/x-NxsGeom",
                                             config.data_inst_geom)
    else:
        data_inst_geom_dst = None

    # Read in normalization data geometry if one is provided
    if config.norm_inst_geom is not None:
        if config.verbose:
            print "Reading in normalization instrument geometry file"
            
        norm_inst_geom_dst = DST.getInstance("application/x-NxsGeom",
                                             config.norm_inst_geom)
    else:
        norm_inst_geom_dst = None        
    
    # Perform Steps 1-2 on sample data
    d_som1 = dr_lib.process_reflp_data(config.data, config, None,
                                       config.dbkg_roi_file,
                                       config.no_bkg,
                                       inst_geom_dst=data_inst_geom_dst,
                                       timer=tim)

    # Get the detector angle
    if config.omega is None:
        # Make a fake SO
        so = SOM.SO()
        try: 
            theta = hlr_utils.get_special(d_som1.attr_list["Theta"], so)
        except KeyError: 
            theta = (float('nan'), float('nan'))
    else:
        theta = config.omega.toFullTuple()
        
    if theta[0] is not None: 
        if theta[2] == "degrees" or theta[2] == "degree": 
            theta_rads = (theta[0] * (math.pi / 180.0), 0.0)
        else: 
            theta_rads = (theta[0], 0.0)
    else: 
        theta_rads = (float('nan'), float('nan'))

    d_som1.attr_list["data-theta"] = (theta_rads[0], theta_rads[1], "radians")

    # Perform Steps 1-3 on normalization data
    if config.norm is not None:
        n_som1 = dr_lib.process_reflp_data(config.norm, config,
                                           config.norm_roi_file,
                                           config.nbkg_roi_file,
                                           config.no_norm_bkg,
                                           inst_geom_dst=norm_inst_geom_dst,
                                           timer=tim)
    else:
        n_som1 = None

    # Closing sample data instrument geometry file
    if data_inst_geom_dst is not None:
        data_inst_geom_dst.release_resource()

    # Closing normalization data instrument geometry file
    if norm_inst_geom_dst is not None:
        norm_inst_geom_dst.release_resource()        

    # Step 4: Divide data by normalization
    if config.verbose and config.norm is not None:
        print "Scale data by normalization"

    if tim is not None:
        tim.getTime(False)

    if config.norm is not None:
        # Need to rebin the normalization spectra to the data pixel spectra
        n_som2 = dr_lib.rebin_monitor(n_som1, d_som1, rtype="frac")
        # Now divide the spectra
        d_som2 = common_lib.div_ncerr(d_som1, n_som2)
        del n_som2
    else:
        d_som2 = d_som1

    if tim is not None and config.norm is not None:
        tim.getTime(msg="After normalizing signal spectra")

    del d_som1, n_som1

    sin_theta_rads = (math.sin(theta_rads[0]), math.sin(theta_rads[1]))
    if sin_theta_rads[0] < 0.0:
        sin_theta_rads = (math.fabs(sin_theta_rads[0]),
                          math.fabs(sin_theta_rads[1]))

    # Step 6: Scale wavelength axis by sin(theta) to make lambda_T
    if config.verbose:
        print "Scaling wavelength axis by sin(theta)"
    
    if tim is not None:
        tim.getTime(False)
        
    d_som3 = common_lib.div_ncerr(d_som2, sin_theta_rads, axis="x")

    if tim is not None:
        tim.getTime(msg="After scaling wavelength axis ")

    del d_som2

    d_som3.setAxisLabel(0, "lambda_T")

    # Step 7: Rebin to lambda_T axis
    if config.verbose:
        print "Rebinning spectra"

    if config.lambdap_bins is None:
        # Create a binning scheme
        pathlength = d_som3.attr_list.instrument.get_total_path(
            det_secondary=True)

        delta_lambda = common_lib.tof_to_wavelength((config.delta_TOF, 0.0),
                                                    pathlength=pathlength)
 
        delta_lambdap = array_manip.div_ncerr(delta_lambda[0], delta_lambda[1],
                                              sin_theta_rads[0], 0.0)

        config.lambdap_bins = dr_lib.create_axis_from_data(d_som3,
                                                       width=delta_lambdap[0])
    else:
        # Do nothing, got the binning scheme
        pass

    if tim is not None:
        tim.getTime(False)

    d_som4 = common_lib.rebin_axis_1D_frac(d_som3,
                                           config.lambdap_bins.toNessiList())

    if tim is not None:
        tim.getTime(msg="After rebinning spectra ")

    del d_som3

    if config.inst == "REF_M":
        # Clean up spectrum
        if config.tof_cut_min is not None:
            tof_cut_min = float(config.tof_cut_min)
        else:
            tof_cut_min = config.TOF_min

        if config.tof_cut_max is not None:
            tof_cut_max = float(config.tof_cut_max)
        else:
            tof_cut_max = config.TOF_max

        pathlength = d_som4.attr_list.instrument.get_total_path(
            det_secondary=True)

        lambda_min = common_lib.tof_to_wavelength((tof_cut_min, 0.0),
                                                  pathlength=pathlength)

        lambda_T_min = common_lib.div_ncerr(lambda_min, sin_theta_rads)
        
        lambda_max = common_lib.tof_to_wavelength((tof_cut_max, 0.0),
                                                  pathlength=pathlength)

        lambda_T_max = common_lib.div_ncerr(lambda_max, sin_theta_rads)

        nz_list = []
        for i in xrange(hlr_utils.get_length(d_som4)):
            nz_list.append((lambda_T_min[0], lambda_T_max[0]))
        
        d_som4A = dr_lib.zero_spectra(d_som4, nz_list)
    else:
        d_som4A = d_som4

    del d_som4

    # Step 8: Write out all spectra to a file
    hlr_utils.write_file(config.output, "text/Spec", d_som4A,
                         replace_ext=False,
                         replace_path=False,
                         verbose=config.verbose,
                         message="Reflectivity information")

    if config.dump_twod:
        d_som5 = dr_lib.create_X_vs_pixpos(d_som4A,
                                           config.lambdap_bins.toNessiList(),
                                           rebin=False,
                                           y_label="R",
                                           y_units="",
                                           x_label="$\lambda_T$",
                                           x_units="$\AA$")

        hlr_utils.write_file(config.output, "text/Dave2d", d_som5,
                             output_ext="plp", verbose=config.verbose,
                             data_ext=config.ext_replacement,
                             path_replacement=config.path_replacement,
                             message="2D Reflectivity information")

    d_som4A.attr_list["config"] = config

    hlr_utils.write_file(config.output, "text/rmd", d_som4A,
                         output_ext="rmd", verbose=config.verbose,
                         data_ext=config.ext_replacement,
                         path_replacement=config.path_replacement,
                         message="metadata")

    if tim is not None:
        tim.setOldTime(old_time)
        tim.getTime(msg="Total Running Time")    
Example #34
0
def run(config, tim):
    """
    This method is where the data reduction process gets done.

    @param config: Object containing the data reduction configuration
                   information.
    @type config: L{hlr_utils.Configure}

    @param tim: Object that will allow the method to perform
    timing evaluations.
    @type tim: C{sns_time.DiffTime}
    """
    import DST

    if tim is not None:
        tim.getTime(False)
        old_time = tim.getOldTime()

    if config.data is None:
        raise RuntimeError("Need to pass a data filename to the driver "\
                           +"script.")

    # Read in geometry if one is provided
    if config.inst_geom is not None:
        if config.verbose:
            print "Reading in instrument geometry file"
            
        inst_geom_dst = DST.getInstance("application/x-NxsGeom",
                                        config.inst_geom)
    else:
        inst_geom_dst = None

    # Perform early background subtraction if the hwfix flag is used
    if config.hwfix:
        if not config.mc:
            so_axis = "time_of_flight"
        else:
            so_axis = "Time_of_Flight"
            
        bkg_som0 = dr_lib.add_files(config.back,
                                    Data_Paths=config.data_paths.toPath(),
                                    SO_Axis=so_axis,
                                    Signal_ROI=config.roi_file,
                                    dataset_type="background",
                                    Verbose=config.verbose, Timer=tim)

        bkg_som = dr_lib.fix_bin_contents(bkg_som0)
        del bkg_som0
    else:
        bkg_som = None

    # Perform Steps 1-8,15 on normalization data            
    n_som1 = dr_lib.process_igs_data(config.data, config, timer=tim,
                                     inst_geom_dst=inst_geom_dst,
                                     dataset_type="normalization",
                                     tib_const=config.tib_norm_const,
                                     bkg_som=bkg_som)
    
    # Perform Steps 1-8,15 on empty can data
    if config.ecan is not None:
        e_som1 = dr_lib.process_igs_data(config.ecan, config, timer=tim,
                                         inst_geom_dst=inst_geom_dst,
                                         dataset_type="empty_can",
                                         tib_const=config.tib_ecan_const,
                                         bkg_som=bkg_som)
    else:
        e_som1 = None

    # Perform Steps 1-8,15 on background data
    if config.back is not None and not config.hwfix:
        b_som1 = dr_lib.process_igs_data(config.back, config, timer=tim,
                                         inst_geom_dst=inst_geom_dst,
                                         dataset_type="background",
                                         tib_const=config.tib_back_const)
    else:
        b_som1 = None

    if inst_geom_dst is not None:
        inst_geom_dst.release_resource()

    # Steps 22-23: Subtract background spectrum from empty can spectrum for
    #              normalization
    e_som2 = dr_lib.subtract_bkg_from_data(e_som1, b_som1,
                                           verbose=config.verbose,
                                           timer=tim,
                                           dataset1="empty_can",
                                           dataset2="background",
                                           scale=config.scale_bcn)

    # Step 24-25: Subtract background spectrum from normalization spectrum
    n_som2 = dr_lib.subtract_bkg_from_data(n_som1, b_som1,
                                           verbose=config.verbose,
                                           timer=tim,
                                           dataset1="normalization",
                                           dataset2="background",
                                           scale=config.scale_bn)
    del b_som1

    # Step 28-29: Subtract empty can spectrum from normalization spectrum
    n_som3 = dr_lib.subtract_bkg_from_data(n_som2, e_som2,
                                           verbose=config.verbose,
                                           timer=tim,
                                           dataset1="normalization",
                                           dataset2="empty_can",
                                           scale=config.scale_cn)

    del n_som2, e_som2

    # Step 30-31: Integrate normalization spectra
    if config.verbose:
        print "Integrating normalization spectra"

    norm_int = dr_lib.integrate_spectra(n_som3, start=config.norm_start,
                                        end=config.norm_end, norm=True)

    n_som3.attr_list["config"] = config
    
    hlr_utils.write_file(config.output, "text/rmd", n_som3,
                         output_ext="rmd",
                         data_ext=config.ext_replacement,         
                         path_replacement=config.path_replacement,
                         verbose=config.verbose,
                         message="metadata")
    
    del n_som3

    file_comment = "Normalization Integration range: %0.3fA, %0.3fA" % \
                   (config.norm_start, config.norm_end)

    hlr_utils.write_file(config.output, "text/num-info", norm_int,
                         output_ext="norm",
                         data_ext=config.ext_replacement,
                         path_replacement=config.path_replacement,
                         verbose=config.verbose,
                         message="normalization values",
                         comments=[file_comment],
                         tag="Integral", units="counts")

    if tim is not None:
        tim.setOldTime(old_time)
        tim.getTime(msg="Total Running Time")
Example #35
0
def add_files_dm(filelist, **kwargs):
    """
    This function takes a list of U{NeXus<www.nexusformat.org>} files and
    various keyword arguments and returns a data C{SOM} and a monitor C{SOM}
    that is the sum of all the data from the specified files. B{It is assumed
    that the files contain similar data as only crude cross-checks will be
    made. You have been warned.}

    @param filelist: A list containing the names of the files to sum
    @type filelist: C{list}
    
    @param kwargs: A list of keyword arguments that the function accepts:
    
    @keyword SO_Axis: This is the name of the main axis to read from the NeXus
                      file
    @type SO_Axis: C{string}
    
    @keyword Data_Paths: This contains the data paths and signals for the
                         requested detector banks
    @type Data_Paths: C{tuple} of C{tuple}s

    @keyword Mon_Paths: This contains the data paths and signals for the
                        requested monitor banks
    @type Mon_Paths: C{tuple} of C{tuple}s    
    
    @keyword Signal_ROI: This is the name of a file that contains a list of
                         pixel IDs that will be read from the data file and
                         stored as a signal C{SOM}
    @type Signal_ROI: C{string}

    @keyword Signal_MASK: This is the name of a file that contains a list of
                         pixel IDs that will be read from the data file and
                         stored as a signal C{SOM}
    @type Signal_MASK: C{string}

    @keyword dataset_type: The practical name of the dataset being processed.
                           The default value is I{data}.
    @type dataset_type: C{string}

    @keyword dataset_cwp: A set of chopper phase corrections for the dataset.
                          This will instruct the function to shift the TOF
                          axes of mulitple datasets and perform rebinning. The
                          TOF axis for the first dataset is the one that all
                          other datasets will be rebinned to.
    @type dataset_cwp: C{list} of C{float}s

    @keyword Verbose: This is a flag to turn on print statments. The default is
                      I{False}.
    @type Verbose: C{boolean}
    
    @keyword Timer: This is an SNS Timer object used for showing the
                    performance timing in the function.
    @type Timer: C{sns_timing.Timer}


    @return: Signal C{SOM.SOM} and monitor C{SOM.SOM}
    @rtype: C{tuple}

    
    @raise SystemExit: If any file cannot be read
    @raise RuntimeError: If both a ROI and MASK file are specified
    """
    import sys

    import common_lib
    import DST

    # Parse keywords
    try:
        so_axis = kwargs["SO_Axis"]
    except KeyError:
        so_axis = "time_of_flight"

    try:
        data_paths = kwargs["Data_Paths"]
    except KeyError:
        data_paths = None

    try:
        mon_paths = kwargs["Mon_Paths"]
    except KeyError:
        mon_paths = None

    try:
        signal_roi = kwargs["Signal_ROI"]
    except KeyError:
        signal_roi = None

    try:
        signal_mask = kwargs["Signal_MASK"]
    except KeyError:
        signal_mask = None

    try:
        dataset_type = kwargs["dataset_type"]
    except KeyError:
        dataset_type = "data"

    try:
        verbose = kwargs["Verbose"]
    except KeyError:
        verbose = False

    try:
        timer = kwargs["Timer"]
    except KeyError:
        timer = None

    dataset_cwp = kwargs.get("dataset_cwp")

    if signal_roi is not None and signal_mask is not None:
        raise RuntimeError("Cannot specify both ROI and MASK file! Please "\
                           +"choose!")

    dst_type = "application/x-NeXus"
    counter = 0

    for filename in filelist:
        if verbose:
            print "File:", filename
            if dataset_cwp is not None:
                print "TOF Offset:", dataset_cwp[counter]

        if dataset_cwp is not None:
            cwp = dataset_cwp[counter]
        else:
            cwp = None

        try:
            data_dst = DST.getInstance(dst_type, filename)
        except SystemError:
            print "ERROR: Failed to data read file %s" % filename
            sys.exit(-1)

        if timer is not None:
            timer.getTime(msg="After parsing file")

        if verbose:
            print "Reading data file %d" % counter

        if counter == 0:
            d_som1 = data_dst.getSOM(data_paths,
                                     so_axis,
                                     roi_file=signal_roi,
                                     mask_file=signal_mask,
                                     tof_offset=cwp)
            d_som1.rekeyNxPars(dataset_type)

            if verbose:
                print "# Signal SO:", len(d_som1)
                try:
                    print "# TOF:", len(d_som1[0])
                    print "# TOF Axis:", len(d_som1[0].axis[0].val)
                except IndexError:
                    # No data is present so say so again
                    print "information is unavailable since no data "\
                          +"present. Exiting."
                    sys.exit(0)

            if timer is not None:
                timer.getTime(msg="After reading data")

            if mon_paths is not None:
                if verbose:
                    print "Reading monitor %d" % counter

                if counter == 0:
                    m_som1 = data_dst.getSOM(mon_paths,
                                             so_axis,
                                             tof_offset=cwp)
                    m_som1.rekeyNxPars(dataset_type)

                if verbose:
                    print "# Monitor SO:", len(m_som1)
                    print "# TOF:", len(m_som1[0])
                    print "# TOF Axis:", len(m_som1[0].axis[0].val)

                if timer is not None:
                    timer.getTime(msg="After reading monitor data")
            else:
                m_som1 = None
        else:
            d_som_t0 = data_dst.getSOM(data_paths,
                                       so_axis,
                                       roi_file=signal_roi,
                                       mask_file=signal_mask,
                                       tof_offset=cwp)
            d_som_t0.rekeyNxPars(dataset_type)

            if timer is not None:
                timer.getTime(msg="After reading data")

            if dataset_cwp is not None:
                d_som_t = common_lib.rebin_axis_1D_frac(
                    d_som_t0, d_som1[0].axis[0].val)
                del d_som_t0
            else:
                d_som_t = d_som_t0

            d_som1 = common_lib.add_ncerr(d_som_t, d_som1, add_nxpars=True)

            if timer is not None:
                timer.getTime(msg="After adding data spectra")

            del d_som_t

            if timer is not None:
                timer.getTime(msg="After data SOM deletion")

            if mon_paths is not None:
                m_som_t0 = data_dst.getSOM(mon_paths, so_axis, tof_offset=cwp)
                m_som_t0.rekeyNxPars(dataset_type)

                if timer is not None:
                    timer.getTime(msg="After reading monitor data")

                if dataset_cwp is not None:
                    m_som_t = common_lib.rebin_axis_1D_frac(
                        m_som_t0, m_som1[0].axis[0].val)
                    del m_som_t0
                else:
                    m_som_t = m_som_t0

                m_som1 = common_lib.add_ncerr(m_som_t, m_som1, add_nxpars=True)

                if timer is not None:
                    timer.getTime(msg="After adding monitor spectra")

                del m_som_t

                if timer is not None:
                    timer.getTime(msg="After monitor SOM deletion")

        data_dst.release_resource()
        del data_dst
        counter += 1

        if timer is not None:
            timer.getTime(msg="After resource release and DST deletion")

        som_key_parts = [dataset_type, "filename"]
        som_key = "-".join(som_key_parts)

        d_som1.attr_list[som_key] = filelist
        if m_som1 is not None:
            m_som1.attr_list[som_key] = filelist

    return (d_som1, m_som1)
Example #36
0
def run(config, tim=None):
    """
    This method is where the data reduction process gets done.  It drives
    the production of an integrated normalization (vanadium/white-beam)
    dataset for a DGS instrument: dark current subtraction, black/empty
    can calibration, spectrum integration, threshold-based mask creation
    and metadata output.

    @param config: Object containing the data reduction configuration
                   information.
    @type config: L{hlr_utils.Configure}

    @param tim: (OPTIONAL) Object that will allow the method to perform
                           timing evaluations.
    @type tim: C{sns_time.DiffTime}
    """
    import common_lib
    import dr_lib
    import DST

    if tim is not None:
        tim.getTime(False)
        # Remember the start time so a total running time can be reported
        # at the very end of the driver.
        old_time = tim.getOldTime()

    if config.data is None:
        raise RuntimeError("Need to pass a data filename to the driver "\
                           +"script.")

    # Read in geometry if one is provided
    if config.inst_geom is not None:
        if config.verbose:
            print "Reading in instrument geometry file"

        inst_geom_dst = DST.getInstance("application/x-NxsGeom",
                                        config.inst_geom)
    else:
        inst_geom_dst = None

    # All datasets in this driver are read on a time-of-flight axis.
    config.so_axis = "time_of_flight"

    # Steps 1-3: Produce a scaled summed dark current dataset
    dc_som = dr_lib.scaled_summed_data(config.dkcur, config,
                                       dataset_type="dark_current",
                                       timer=tim)

    # Perform Steps 3-6 on black can data
    if config.bcan is not None:
        b_som1 = dr_lib.calibrate_dgs_data(config.bcan, config, dc_som,
                                           dataset_type="black_can",
                                           inst_geom_dst=inst_geom_dst,
                                           tib_const=config.tib_const,
                                           cwp=config.cwp_bcan,
                                           timer=tim)
    else:
        b_som1 = None

    # Perform Steps 3-6 on empty can data    
    if config.ecan is not None:
        e_som1 = dr_lib.calibrate_dgs_data(config.ecan, config, dc_som,
                                           dataset_type="empty_can",
                                           inst_geom_dst=inst_geom_dst,
                                           tib_const=config.tib_const,
                                           cwp=config.cwp_ecan,
                                           timer=tim)
    else:
        e_som1 = None

    # Perform Steps 3-6 on normalization data
    # NOTE: config.data holds the normalization run in this driver.
    n_som1 = dr_lib.calibrate_dgs_data(config.data, config, dc_som,
                                       dataset_type="normalization",
                                       inst_geom_dst=inst_geom_dst,
                                       tib_const=config.tib_const,
                                       cwp=config.cwp_data,
                                       timer=tim)

    # Perform Steps 7-16 on normalization data
    if config.norm_trans_coeff is None:
        norm_trans_coeff = None
    else:
        norm_trans_coeff = config.norm_trans_coeff.toValErrTuple()

    # Determine if we need to rebin the empty or black can data
    # NOTE(review): e_som1 is non-None exactly when config.ecan is, so the
    # second half of each test looks redundant — presumably it guards
    # against calibrate_dgs_data returning None; confirm before removing.
    if config.ecan is not None and e_som1 is not None:
        ecan_cwp = True
    else:
        ecan_cwp = False

    if config.bcan is not None and b_som1 is not None:
        bcan_cwp = True
    else:
        bcan_cwp = False

    cwp_used = ecan_cwp or bcan_cwp

    n_som2 = dr_lib.process_dgs_data(n_som1, config, b_som1, e_som1,
                                     norm_trans_coeff,
                                     dataset_type="normalization",
                                     cwp_used=cwp_used,
                                     timer=tim)

    del n_som1, b_som1, e_som1

    # Step 17: Integrate normalization spectra
    if config.verbose:
        print "Integrating normalization spectra"

    if tim is not None:
        tim.getTime(False)

    if config.norm_int_range is None:
        # Infinite limits ask integrate_spectra to use the full axis range.
        start_val = float("inf")
        end_val = float("inf")
    else:
        if not config.wb_norm:
            # Translate energy transfer to final energy
            ef_start = config.initial_energy.getValue() - \
                       config.norm_int_range[0]
            ef_end = config.initial_energy.getValue() - \
                     config.norm_int_range[1]
            # Convert final energy to final wavelength
            start_val = common_lib.energy_to_wavelength((ef_start, 0.0))[0]
            end_val = common_lib.energy_to_wavelength((ef_end, 0.0))[0]
        else:
            # White-beam normalization: the range is already in wavelength.
            start_val = config.norm_int_range[0]
            end_val = config.norm_int_range[1]

    n_som3 = dr_lib.integrate_spectra(n_som2, start=start_val,
                                      end=end_val, width=True)

    del n_som2

    if tim is not None:
        tim.getTime(msg="After integrating normalization spectra ")

    file_comment = "Normalization Integration range: %0.3fA, %0.3fA" \
                   % (start_val, end_val)

    hlr_utils.write_file(config.output, "text/num-info", n_som3,
                         output_ext="norm",
                         data_ext=config.ext_replacement,
                         path_replacement=config.path_replacement,
                         verbose=config.verbose,
                         message="normalization values",
                         comments=[file_comment],
                         tag="Integral", units="counts")

    if tim is not None:
        tim.getTime(False)

    if config.verbose:
        print "Making mask file"

    # Make mask file from threshold
    dr_lib.filter_normalization(n_som3, config.lo_threshold,
                                config.hi_threshold, config)

    if tim is not None:
        tim.getTime(msg="After making mask file ")

    # Write out RMD file
    n_som3.attr_list["config"] = config

    hlr_utils.write_file(config.output, "text/rmd", n_som3,
                         output_ext="rmd",
                         data_ext=config.ext_replacement,
                         path_replacement=config.path_replacement,
                         verbose=config.verbose,
                         message="metadata")

    if tim is not None:
        tim.setOldTime(old_time)
        tim.getTime(msg="Total Running Time")
Example #37
0
def run(config, tim):
    """
    This method is where the data reduction process gets done.

    @param config: Object containing the data reduction configuration
                   information.
    @type config: L{hlr_utils.Configure}

    @param tim: Object that will allow the method to perform timing
                evaluations.
    @type tim: C{sns_time.DiffTime}
    """
    import array_manip
    import common_lib
    import dr_lib
    import DST
    import SOM

    import math

    if tim is not None:
        tim.getTime(False)
        old_time = tim.getOldTime()

    if config.data is None:
        raise RuntimeError("Need to pass a data filename to the driver "\
                           +"script.")

    # Read in sample data geometry if one is provided
    if config.data_inst_geom is not None:
        if config.verbose:
            print "Reading in sample data instrument geometry file"

        data_inst_geom_dst = DST.getInstance("application/x-NxsGeom",
                                             config.data_inst_geom)
    else:
        data_inst_geom_dst = None

    # Read in normalization data geometry if one is provided
    if config.norm_inst_geom is not None:
        if config.verbose:
            print "Reading in normalization instrument geometry file"

        norm_inst_geom_dst = DST.getInstance("application/x-NxsGeom",
                                             config.norm_inst_geom)
    else:
        norm_inst_geom_dst = None

    # Perform Steps 1-2 on sample data
    d_som1 = dr_lib.process_reflp_data(config.data,
                                       config,
                                       None,
                                       config.dbkg_roi_file,
                                       config.no_bkg,
                                       inst_geom_dst=data_inst_geom_dst,
                                       timer=tim)

    # Get the detector angle
    if config.omega is None:
        # Make a fake SO
        so = SOM.SO()
        try:
            theta = hlr_utils.get_special(d_som1.attr_list["Theta"], so)
        except KeyError:
            theta = (float('nan'), float('nan'))
    else:
        theta = config.omega.toFullTuple()

    if theta[0] is not None:
        if theta[2] == "degrees" or theta[2] == "degree":
            theta_rads = (theta[0] * (math.pi / 180.0), 0.0)
        else:
            theta_rads = (theta[0], 0.0)
    else:
        theta_rads = (float('nan'), float('nan'))

    d_som1.attr_list["data-theta"] = (theta_rads[0], theta_rads[1], "radians")

    # Perform Steps 1-3 on normalization data
    if config.norm is not None:
        n_som1 = dr_lib.process_reflp_data(config.norm,
                                           config,
                                           config.norm_roi_file,
                                           config.nbkg_roi_file,
                                           config.no_norm_bkg,
                                           inst_geom_dst=norm_inst_geom_dst,
                                           timer=tim)
    else:
        n_som1 = None

    # Closing sample data instrument geometry file
    if data_inst_geom_dst is not None:
        data_inst_geom_dst.release_resource()

    # Closing normalization data instrument geometry file
    if norm_inst_geom_dst is not None:
        norm_inst_geom_dst.release_resource()

    # Step 4: Divide data by normalization
    if config.verbose and config.norm is not None:
        print "Scale data by normalization"

    if tim is not None:
        tim.getTime(False)

    if config.norm is not None:
        # Need to rebin the normalization spectra to the data pixel spectra
        n_som2 = dr_lib.rebin_monitor(n_som1, d_som1, rtype="frac")
        # Now divide the spectra
        d_som2 = common_lib.div_ncerr(d_som1, n_som2)
        del n_som2
    else:
        d_som2 = d_som1

    if tim is not None and config.norm is not None:
        tim.getTime(msg="After normalizing signal spectra")

    del d_som1, n_som1

    sin_theta_rads = (math.sin(theta_rads[0]), math.sin(theta_rads[1]))
    if sin_theta_rads[0] < 0.0:
        sin_theta_rads = (math.fabs(sin_theta_rads[0]),
                          math.fabs(sin_theta_rads[1]))

    # Step 6: Scale wavelength axis by sin(theta) to make lambda_T
    if config.verbose:
        print "Scaling wavelength axis by sin(theta)"

    if tim is not None:
        tim.getTime(False)

    d_som3 = common_lib.div_ncerr(d_som2, sin_theta_rads, axis="x")

    if tim is not None:
        tim.getTime(msg="After scaling wavelength axis ")

    del d_som2

    d_som3.setAxisLabel(0, "lambda_T")

    # Step 7: Rebin to lambda_T axis
    if config.verbose:
        print "Rebinning spectra"

    if config.lambdap_bins is None:
        # Create a binning scheme
        pathlength = d_som3.attr_list.instrument.get_total_path(
            det_secondary=True)

        delta_lambda = common_lib.tof_to_wavelength((config.delta_TOF, 0.0),
                                                    pathlength=pathlength)

        delta_lambdap = array_manip.div_ncerr(delta_lambda[0], delta_lambda[1],
                                              sin_theta_rads[0], 0.0)

        config.lambdap_bins = dr_lib.create_axis_from_data(
            d_som3, width=delta_lambdap[0])
    else:
        # Do nothing, got the binning scheme
        pass

    if tim is not None:
        tim.getTime(False)

    d_som4 = common_lib.rebin_axis_1D_frac(d_som3,
                                           config.lambdap_bins.toNessiList())

    if tim is not None:
        tim.getTime(msg="After rebinning spectra ")

    del d_som3

    if config.inst == "REF_M":
        # Clean up spectrum
        if config.tof_cut_min is not None:
            tof_cut_min = float(config.tof_cut_min)
        else:
            tof_cut_min = config.TOF_min

        if config.tof_cut_max is not None:
            tof_cut_max = float(config.tof_cut_max)
        else:
            tof_cut_max = config.TOF_max

        pathlength = d_som4.attr_list.instrument.get_total_path(
            det_secondary=True)

        lambda_min = common_lib.tof_to_wavelength((tof_cut_min, 0.0),
                                                  pathlength=pathlength)

        lambda_T_min = common_lib.div_ncerr(lambda_min, sin_theta_rads)

        lambda_max = common_lib.tof_to_wavelength((tof_cut_max, 0.0),
                                                  pathlength=pathlength)

        lambda_T_max = common_lib.div_ncerr(lambda_max, sin_theta_rads)

        nz_list = []
        for i in xrange(hlr_utils.get_length(d_som4)):
            nz_list.append((lambda_T_min[0], lambda_T_max[0]))

        d_som4A = dr_lib.zero_spectra(d_som4, nz_list)
    else:
        d_som4A = d_som4

    del d_som4

    # Step 8: Write out all spectra to a file
    hlr_utils.write_file(config.output,
                         "text/Spec",
                         d_som4A,
                         replace_ext=False,
                         replace_path=False,
                         verbose=config.verbose,
                         message="Reflectivity information")

    if config.dump_twod:
        d_som5 = dr_lib.create_X_vs_pixpos(d_som4A,
                                           config.lambdap_bins.toNessiList(),
                                           rebin=False,
                                           y_label="R",
                                           y_units="",
                                           x_label="$\lambda_T$",
                                           x_units="$\AA$")

        hlr_utils.write_file(config.output,
                             "text/Dave2d",
                             d_som5,
                             output_ext="plp",
                             verbose=config.verbose,
                             data_ext=config.ext_replacement,
                             path_replacement=config.path_replacement,
                             message="2D Reflectivity information")

    d_som4A.attr_list["config"] = config

    hlr_utils.write_file(config.output,
                         "text/rmd",
                         d_som4A,
                         output_ext="rmd",
                         verbose=config.verbose,
                         data_ext=config.ext_replacement,
                         path_replacement=config.path_replacement,
                         message="metadata")

    if tim is not None:
        tim.setOldTime(old_time)
        tim.getTime(msg="Total Running Time")
Example #38
0
# the United States Government.  Neither the United States Government nor the
# United States Department of Energy, nor any of their employees, makes any
# warranty, express or implied, or assumes any legal liability or
# responsibility for the accuracy, completeness, or usefulness of any
# information, apparatus, product, or process disclosed, or represents that
# its use would not infringe privately owned rights.
#

# $Id$

import DST
import hlr_utils
import SOM
import sys

if __name__ == "__main__":
    # Take the NeXus file from the command line when given; otherwise hand
    # None to the DST layer so it falls back on its default name.
    if len(sys.argv) > 1:
        data_file = sys.argv[1]
    else:
        data_file = None

    nexus_dst = DST.getInstance("application/x-NeXus", data_file)
    spectrum = nexus_dst.getSOM(("/entry/bank1", 1))

    # Dump the spectrum object to a PAR-format text file.
    output_handle = open("test.par", "w")
    par_writer = DST.ParDST(output_handle)
    par_writer.writeSOM(spectrum)
    par_writer.release_resource()
Example #39
0
def run(config, tim=None):
    """
    This method is where the data reduction process gets done.

    @param config: Object containing the data reduction configuration
                   information.
    @type config: L{hlr_utils.Configure}

    @param tim: (OPTIONAL) Object that will allow the method to perform
                           timing evaluations.
    @type tim: C{sns_time.DiffTime}
    """
    import common_lib
    import dr_lib
    import DST

    if tim is not None:
        tim.getTime(False)
        old_time = tim.getOldTime()

    if config.data is None:
        raise RuntimeError("Need to pass a data filename to the driver "\
                           +"script.")

    # Read in geometry if one is provided
    if config.inst_geom is not None:
        if config.verbose:
            print "Reading in instrument geometry file"

        inst_geom_dst = DST.getInstance("application/x-NxsGeom",
                                        config.inst_geom)
    else:
        inst_geom_dst = None

    only_background = False
    data_type = "transmission"

    # Perform Steps 1,6-7 or 1,3,5-7 on sample data
    d_som1 = dr_lib.process_sas_data(config.data,
                                     config,
                                     timer=tim,
                                     inst_geom_dst=inst_geom_dst,
                                     dataset_type=data_type,
                                     transmission=True,
                                     get_background=only_background)

    # Perform Steps 1,6-7 on background data
    if config.back is not None:
        b_som1 = dr_lib.process_sas_data(config.back,
                                         config,
                                         timer=tim,
                                         inst_geom_dst=inst_geom_dst,
                                         dataset_type="trans_bkg",
                                         transmission=True)
    else:
        b_som1 = None

    # Put the datasets on the same axis
    d_som2 = dr_lib.sum_by_rebin_frac(d_som1, config.lambda_bins.toNessiList())
    del d_som1

    if b_som1 is not None:
        b_som2 = dr_lib.sum_by_rebin_frac(b_som1,
                                          config.lambda_bins.toNessiList())
    else:
        b_som2 = None

    del b_som1

    # Divide the data spectrum by the background spectrum
    if b_som2 is not None:
        d_som3 = common_lib.div_ncerr(d_som2, b_som2)
    else:
        d_som3 = d_som2

    del d_som2, b_som2

    # Reset y units to dimensionless for the tranmission due to ratio
    if config.back is not None:
        d_som3.setYLabel("Ratio")
        d_som3.setYUnits("")
        write_message = "transmission spectrum"
    else:
        write_message = "spectrum for background estimation"

    # Write out the transmission spectrum
    hlr_utils.write_file(config.output,
                         "text/Spec",
                         d_som3,
                         verbose=config.verbose,
                         replace_path=False,
                         replace_ext=False,
                         message=write_message)

    d_som3.attr_list["config"] = config

    hlr_utils.write_file(config.output,
                         "text/rmd",
                         d_som3,
                         output_ext="rmd",
                         data_ext=config.ext_replacement,
                         path_replacement=config.path_replacement,
                         verbose=config.verbose,
                         message="metadata")

    if tim is not None:
        tim.setOldTime(old_time)
        tim.getTime(msg="Total Running Time")
Example #40
0
def run(config, tim=None):
    """
    This method is where the data reduction process gets done.  It drives
    a full DGS sample reduction: calibrate dark current/black can/empty
    can/sample data, normalize, convert to energy transfer and produce
    S(Q,E) (and optionally S(Qvec,E)) distributions plus SPE/PHX/PAR and
    metadata output files.

    @param config: Object containing the data reduction configuration
                   information.
    @type config: L{hlr_utils.Configure}

    @param tim: (OPTIONAL) Object that will allow the method to perform
                           timing evaluations.
    @type tim: C{sns_time.DiffTime}
    """
    import common_lib
    import dr_lib
    import DST

    if tim is not None:
        tim.getTime(False)
        # Remember the start time so a total running time can be reported
        # at the very end of the driver.
        old_time = tim.getOldTime()

    if config.data is None:
        raise RuntimeError("Need to pass a data filename to the driver "\
                           +"script.")

    # Read in geometry if one is provided
    if config.inst_geom is not None:
        if config.verbose:
            print "Reading in instrument geometry file"

        inst_geom_dst = DST.getInstance("application/x-NxsGeom",
                                        config.inst_geom)
    else:
        inst_geom_dst = None

    # All datasets in this driver are read on a time-of-flight axis.
    config.so_axis = "time_of_flight"

    # Collapse a list of mask files into a single combined mask file so
    # the rest of the driver only ever deals with one filename.
    try:
        if type(config.mask_file) == type([]):
            if len(config.mask_file) > 1:
                if config.verbose:
                    print "Creating combined mask file"

                if tim is not None:
                    tim.getTime(False)

                config.mask_file = hlr_utils.merge_roi_files(\
                        config.mask_file,
                        config)

                if tim is not None:
                    tim.getTime(msg="After creating combined mask file")
            else:
                config.mask_file = config.mask_file[0]
        else:
            # Do nothing since it's already a string
            pass
    except TypeError:
        # No mask files provided, do nothing
        pass

    # Steps 1-3: Produce a scaled summed dark current dataset
    dc_som = dr_lib.scaled_summed_data(config.dkcur,
                                       config,
                                       dataset_type="dark_current",
                                       timer=tim)

    # Perform Steps 3-6 on black can data
    if config.bcan is not None:
        b_som1 = dr_lib.calibrate_dgs_data(config.bcan,
                                           config,
                                           dc_som,
                                           dataset_type="black_can",
                                           inst_geom_dst=inst_geom_dst,
                                           tib_const=config.tib_const,
                                           cwp=config.cwp_bcan,
                                           timer=tim)
    else:
        b_som1 = None

    # Perform Steps 3-6 on empty can data
    if config.ecan is not None:
        e_som1 = dr_lib.calibrate_dgs_data(config.ecan,
                                           config,
                                           dc_som,
                                           dataset_type="empty_can",
                                           inst_geom_dst=inst_geom_dst,
                                           tib_const=config.tib_const,
                                           cwp=config.cwp_ecan,
                                           timer=tim)
    else:
        e_som1 = None

    # Perform Steps 3-6 on sample data
    d_som1 = dr_lib.calibrate_dgs_data(config.data,
                                       config,
                                       dc_som,
                                       inst_geom_dst=inst_geom_dst,
                                       tib_const=config.tib_const,
                                       cwp=config.cwp_data,
                                       timer=tim)

    # Perform Steps 7-16 on sample data
    if config.data_trans_coeff is None:
        data_trans_coeff = None
    else:
        data_trans_coeff = config.data_trans_coeff.toValErrTuple()

    # Determine if we need to rebin the empty or black can data
    # NOTE(review): e_som1 is non-None exactly when config.ecan is, so the
    # second half of each test looks redundant — presumably it guards
    # against calibrate_dgs_data returning None; confirm before removing.
    if config.ecan is not None and e_som1 is not None:
        ecan_cwp = True
    else:
        ecan_cwp = False

    if config.bcan is not None and b_som1 is not None:
        bcan_cwp = True
    else:
        bcan_cwp = False

    cwp_used = ecan_cwp or bcan_cwp

    d_som2 = dr_lib.process_dgs_data(d_som1,
                                     config,
                                     b_som1,
                                     e_som1,
                                     data_trans_coeff,
                                     cwp_used=cwp_used,
                                     timer=tim)

    del d_som1

    del b_som1, e_som1

    # Step 18: Normalize sample data by integrated values
    if config.norm is not None:
        if config.verbose:
            print "Reading normalization information"

        # The normalization input is a previously written num-info text
        # file of integrated values, not a raw NeXus file.
        norm_int = dr_lib.add_files(config.norm,
                                    Signal_ROI=config.roi_file,
                                    Signal_MASK=config.mask_file,
                                    dataset_type="normalization",
                                    dst_type="text/num-info",
                                    Verbose=config.verbose,
                                    Timer=tim)

        # Make the labels and units compatible with a NeXus file based SOM
        norm_int.setAllAxisLabels(["wavelength"])
        norm_int.setAllAxisUnits(["Angstroms"])
        norm_int.setYLabel("Intensity")
        norm_int.setYUnits("Counts/Angstroms")

        if config.verbose:
            print "Normalizing data by normalization data"

        if tim is not None:
            tim.getTime(False)

        d_som3 = common_lib.div_ncerr(d_som2, norm_int)

        if tim is not None:
            tim.getTime(msg="After normalizing data ")

        del norm_int
    else:
        d_som3 = d_som2

    del d_som2

    # Step 19: Calculate the initial energy
    if config.initial_energy is not None:
        d_som3.attr_list["Initial_Energy"] = config.initial_energy

    # Steps 20-21: Calculate the energy transfer
    if config.verbose:
        print "Calculating energy transfer"

    if tim is not None:
        tim.getTime(False)

    d_som4 = dr_lib.energy_transfer(d_som3,
                                    "DGS",
                                    "Initial_Energy",
                                    lojac=True,
                                    scale=config.lambda_ratio)

    if tim is not None:
        tim.getTime(msg="After calculating energy transfer ")

    del d_som3

    # Step 22: Rebin energy transfer spectra
    if config.verbose:
        print "Rebinning to final energy transfer axis"

    if tim is not None:
        tim.getTime(False)

    d_som5 = common_lib.rebin_axis_1D_frac(d_som4, config.E_bins.toNessiList())

    if tim is not None:
        tim.getTime(msg="After rebinning energy transfer ")

    del d_som4

    # Optionally dump a single combined energy transfer spectrum.
    if config.dump_et_comb:
        d_som5_1 = dr_lib.sum_all_spectra(d_som5)
        hlr_utils.write_file(config.output,
                             "text/Spec",
                             d_som5_1,
                             output_ext="et",
                             data_ext=config.ext_replacement,
                             path_replacement=config.path_replacement,
                             verbose=config.verbose,
                             message="combined energy transfer information")

        del d_som5_1

    # Get the corner geometry information
    if config.verbose:
        print "Reading in corner geometry information"

    if tim is not None:
        tim.getTime(False)

    corner_angles = hlr_utils.get_corner_geometry(config.corner_geom)

    if tim is not None:
        tim.getTime(msg="After reading in corner geometry information ")

    # Optionally write the Mslice-style SPE/PHX/PAR file trio.
    if config.make_spe:
        d_som5.attr_list["corner_geom"] = corner_angles

        hlr_utils.write_file(config.output,
                             "text/PHX",
                             d_som5,
                             output_ext="phx",
                             data_ext=config.ext_replacement,
                             path_replacement=config.path_replacement,
                             verbose=config.verbose,
                             message="PHX information")

        hlr_utils.write_file(config.output,
                             "text/PAR",
                             d_som5,
                             output_ext="par",
                             data_ext=config.ext_replacement,
                             path_replacement=config.path_replacement,
                             verbose=config.verbose,
                             message="PAR information")

        hlr_utils.write_file(config.output,
                             "text/SPE",
                             d_som5,
                             output_ext="spe",
                             data_ext=config.ext_replacement,
                             path_replacement=config.path_replacement,
                             verbose=config.verbose,
                             message="SPE information")

    # Steps 23-34: Create S(Q, E) distribution
    if config.verbose:
        print "Creating S(Q, E)"

    if tim is not None:
        tim.getTime(False)

    d_som5_2 = dr_lib.create_E_vs_Q_dgs(d_som5,
                                        config.initial_energy.toValErrTuple(),
                                        config.Q_bins.toNessiList(),
                                        corner_angles=corner_angles,
                                        split=config.split,
                                        configure=config,
                                        timer=tim)

    # Writing 2D DAVE file
    # NOTE(review): when config.split is set, create_E_vs_Q_dgs presumably
    # writes its own output, which is why nothing is written here; confirm.
    if not config.split:
        hlr_utils.write_file(config.output,
                             "text/Dave2d",
                             d_som5_2,
                             output_ext="sqe",
                             data_ext=config.ext_replacement,
                             path_replacement=config.path_replacement,
                             verbose=config.verbose,
                             message="S(Q,E)")

        hlr_utils.write_file(config.output,
                             "application/x-RedNxs",
                             d_som5_2,
                             output_ext="nxs",
                             data_ext=config.ext_replacement,
                             path_replacement=config.path_replacement,
                             verbose=config.verbose,
                             extra_tag="sqe",
                             getsom_kwargs={"entry_name": "sqe"},
                             message="NeXus S(Q,E)")

    if tim is not None:
        tim.getTime(msg="After calculating S(Q,E) spectrum ")

    del d_som5_2

    if config.qmesh:
        # Steps 23-27,35-36: Create S(Qvec, E) distribution
        if config.verbose:
            print "Creating S(Qvec, E)"

        if tim is not None:
            tim.getTime(False)

        # This call writes its own output via the output keyword, so no
        # return value is kept.
        dr_lib.create_Qvec_vs_E_dgs(d_som5,
                                    config.initial_energy.toValErrTuple(),
                                    config,
                                    corner_angles=corner_angles,
                                    make_fixed=config.fixed,
                                    output=config.output,
                                    timer=tim)

        if tim is not None:
            tim.getTime(msg="After calculating final spectrum ")

    # Write out RMD file
    d_som5.attr_list["config"] = config

    hlr_utils.write_file(config.output,
                         "text/rmd",
                         d_som5,
                         output_ext="rmd",
                         data_ext=config.ext_replacement,
                         path_replacement=config.path_replacement,
                         verbose=config.verbose,
                         message="metadata")

    if tim is not None:
        tim.setOldTime(old_time)
        tim.getTime(msg="Total Running Time")
Example #41
0
def run(config, tim):
    """
    This method is where the data reduction process gets done.

    @param config: Object containing the data reduction configuration
                   information.
    @type config: L{hlr_utils.Configure}

    @param tim: Object that will allow the method to perform timing
                evaluations.
    @type tim: C{sns_time.DiffTime}
    """
    import DST
    import math
    if config.inst == "REF_M":
        import axis_manip
        import utils

    if tim is not None:
        tim.getTime(False)
        old_time = tim.getOldTime()

    if config.data is None:
        raise RuntimeError("Need to pass a data filename to the driver "\
                           +"script.")

    # Read in sample data geometry if one is provided
    if config.data_inst_geom is not None:
        if config.verbose:
            print "Reading in sample data instrument geometry file"
            
        data_inst_geom_dst = DST.getInstance("application/x-NxsGeom",
                                             config.data_inst_geom)
    else:
        data_inst_geom_dst = None

    # Read in normalization data geometry if one is provided
    if config.norm_inst_geom is not None:
        if config.verbose:
            print "Reading in normalization instrument geometry file"
            
        norm_inst_geom_dst = DST.getInstance("application/x-NxsGeom",
                                        config.norm_inst_geom)
    else:
        norm_inst_geom_dst = None        
    
    # Perform Steps 1-6 on sample data
    d_som1 = dr_lib.process_ref_data(config.data, config,
                                     config.data_roi_file,
                                     config.dbkg_roi_file,
                                     config.no_bkg,
                                     tof_cuts=config.tof_cuts,
                                     inst_geom_dst=data_inst_geom_dst,
                                     no_tof_cuts=True,
                                     timer=tim)

    # Perform Steps 1-6 on normalization data
    if config.norm is not None:
        n_som1 = dr_lib.process_ref_data(config.norm, config,
                                         config.norm_roi_file,
                                         config.nbkg_roi_file,
                                         config.no_norm_bkg,
                                         dataset_type="norm",
                                         tof_cuts=config.tof_cuts,
                                         inst_geom_dst=norm_inst_geom_dst,
                                         no_tof_cuts=True,
                                         timer=tim)
    else:
        n_som1 = None

    if config.Q_bins is None and config.scatt_angle is not None:
        import copy
        tof_axis = copy.deepcopy(d_som1[0].axis[0].val)

    # Closing sample data instrument geometry file
    if data_inst_geom_dst is not None:
        data_inst_geom_dst.release_resource()

    # Closing normalization data instrument geometry file
    if norm_inst_geom_dst is not None:
        norm_inst_geom_dst.release_resource()        

    # Step 7: Sum all normalization spectra together
    if config.norm is not None:
        n_som2 = dr_lib.sum_all_spectra(n_som1)
    else:
        n_som2 = None

    del n_som1

    # Step 8: Divide data by normalization
    if config.verbose and config.norm is not None:
        print "Scale data by normalization"

    if config.norm is not None:
        d_som2 = common_lib.div_ncerr(d_som1, n_som2, length_one_som=True)
    else:
        d_som2 = d_som1

    if tim is not None and config.norm is not None:
        tim.getTime(msg="After normalizing signal spectra")

    del d_som1, n_som2

    if config.dump_rtof_comb:
        d_som2_1 = dr_lib.sum_all_spectra(d_som2)
        d_som2_2 = dr_lib.data_filter(d_som2_1)
        del d_som2_1

        if config.inst == "REF_M":
            tof_bc = utils.calc_bin_centers(d_som2_2[0].axis[0].val)
            d_som2_2[0].axis[0].val = tof_bc[0]
            d_som2_2.setDataSetType("density")

        d_som2_3 = dr_lib.cut_spectra(d_som2_2, config.tof_cut_min,
                                      config.tof_cut_max)
        del d_som2_2
        
        hlr_utils.write_file(config.output, "text/Spec", d_som2_3,
                             output_ext="crtof",
                             verbose=config.verbose,
                             data_ext=config.ext_replacement,
                             path_replacement=config.path_replacement,
                             message="combined R(TOF) information")
        del d_som2_3

    if config.dump_rtof:
        if config.inst == "REF_M":
            d_som2_1 = d_som2
        else:
            d_som2_1 = dr_lib.filter_ref_data(d_som2)

        d_som2_2 = dr_lib.cut_spectra(d_som2_1, config.tof_cut_min,
                                      config.tof_cut_max)
        del d_som2_1
        hlr_utils.write_file(config.output, "text/Spec", d_som2_2,
                             output_ext="rtof",
                             verbose=config.verbose,
                             data_ext=config.ext_replacement,
                             path_replacement=config.path_replacement,
                             message="R(TOF) information")
        del d_som2_2

    # Step 9: Convert TOF to scalar Q
    if config.verbose:
        print "Converting TOF to scalar Q"
        if config.beamdiv_corr:
            print "Applying beam divergence correction"
    
    # Check to see if polar angle offset is necessary
    if config.angle_offset is not None:
        # Check on units, offset must be in radians
        p_offset = hlr_utils.angle_to_radians(config.angle_offset)
        d_som2.attr_list["angle_offset"] = config.angle_offset
    else:
        p_offset = None

    # Check to see if scattering angle is requested
    if config.scatt_angle is not None:
        # Mainly used by REF_M
        scatt_angle = hlr_utils.angle_to_radians(config.scatt_angle)
        scatt_angle = (scatt_angle[0]/2.0, scatt_angle[1])
    else:
        scatt_angle = None
    
    if tim is not None:
        tim.getTime(False)

    d_som3 = dr_lib.tof_to_ref_scalar_Q(d_som2, units="microsecond",
                                        angle_offset=p_offset,
                                        lojac=False,
                                        polar=scatt_angle,
                                        configure=config)
    
    del d_som2
            
    if tim is not None:
        tim.getTime(msg="After converting wavelength to scalar Q ")

    # Calculate the Q cut range from the TOF cuts range
    if scatt_angle is not None:
        polar_angle = (scatt_angle[0]/2.0, scatt_angle[1])
    else:
        polar_angle = (d_som3.attr_list["data-theta"][0], 0)

    if p_offset is not None:
        polar_angle = (polar_angle[0] + p_offset[0],
                       polar_angle[1] + p_offset[1])

    pl = d_som3.attr_list.instrument.get_total_path(det_secondary=True)
    # Since Q ~ 1/T, need to reverse cut designation
    if config.tof_cut_min is not None:
        Q_cut_max = dr_lib.tof_to_ref_scalar_Q((float(config.tof_cut_min), 0.0),
                                               pathlength=pl,
                                               polar=polar_angle)[0]
    else:
        Q_cut_max = None
        
    if config.tof_cut_max is not None:
        Q_cut_min = dr_lib.tof_to_ref_scalar_Q((float(config.tof_cut_max), 0.0),
                                               pathlength=pl,
                                               polar=polar_angle)[0]
    else:
        Q_cut_min = None
    
    if config.dump_rq:
        d_som3_1 = dr_lib.data_filter(d_som3, clean_axis=True)
        d_som3_2 = dr_lib.cut_spectra(d_som3_1, Q_cut_min, Q_cut_max)
        del d_som3_1
        hlr_utils.write_file(config.output, "text/Spec", d_som3_2,
                             output_ext="rq",
                             verbose=config.verbose,
                             data_ext=config.ext_replacement,
                             path_replacement=config.path_replacement,
                             message="pixel R(Q) information")
        del d_som3_2
                    
    if config.Q_bins is not None or config.beamdiv_corr:
        if config.verbose:
            print "Rebinning data"
        d_som4 = common_lib.rebin_axis_1D_frac(d_som3,
                                               config.Q_bins.toNessiList())
        
        if config.dump_rqr:
            d_som4_1 = dr_lib.data_filter(d_som4, clean_axis=True)
            d_som4_2 = dr_lib.cut_spectra(d_som4_1, Q_cut_min, Q_cut_max)
            del d_som4_1
            hlr_utils.write_file(config.output, "text/Spec", d_som4_2,
                                 output_ext="rqr",
                                 verbose=config.verbose,
                                 data_ext=config.ext_replacement,
                                 path_replacement=config.path_replacement,
                                 message="rebinned pixel R(Q) information")
            del d_som4_2
    else:
        d_som4 = d_som3

    del d_som3

    if not config.no_filter:
        if config.verbose:
            print "Filtering final data"
            
        if tim is not None:
            tim.getTime(False)
            
        d_som5 = dr_lib.data_filter(d_som4)
    
        if tim is not None:
            tim.getTime(msg="After filtering data")
    else:
        d_som5 = d_som4
    
    del d_som4

    # Sum all spectra since everything is on same axis
    d_som6 = dr_lib.sum_all_spectra(d_som5)
    
    del d_som5

    d_som7 = dr_lib.cut_spectra(d_som6, Q_cut_min, Q_cut_max,
                                num_bins_clean=config.num_bins_clean)

    del d_som6

    hlr_utils.write_file(config.output, "text/Spec", d_som7,
                         replace_ext=False,
                         replace_path=False,
                         verbose=config.verbose,
                         message="combined Reflectivity information")

    d_som7.attr_list["config"] = config

    hlr_utils.write_file(config.output, "text/rmd", d_som7,
                         output_ext="rmd", verbose=config.verbose,
                         data_ext=config.ext_replacement,
                         path_replacement=config.path_replacement,
                         message="metadata")

    if tim is not None:
        tim.setOldTime(old_time)
        tim.getTime(msg="Total Running Time")
Example #42
0
# $Id$

import DST
import hlr_utils
import SOM
import sys

# Diagnostic entry point: read a detector-bank SOM from a NeXus file and
# open the matching corner-geometry calibration file for that instrument.
if __name__ == "__main__":
    # Optional first command-line argument: path to the NeXus file.
    filename = None
    try:
        filename = sys.argv[1]
    except IndexError:
        pass  # use the default name

    # NOTE(review): if no argument is given, filename stays None and the
    # substring checks below will fail -- confirm a default is intended here.
    dst = DST.getInstance("application/x-NeXus", filename)
    som = dst.getSOM(("/entry/bank1", 1))

    # Select the corner-geometry calibration file based on which instrument
    # name appears in the data filename
    if "ARCS" in filename:
        cgeom = "/SNS/ARCS/2009_2_18_CAL/calibrations/ARCS_cgeom_20090128.txt"
    elif "CNCS" in filename:
        cgeom = "/SNS/CNCS/2009_2_5_CAL/calibrations/CNCS_cgeom_20090224.txt"
    elif "SEQ" in filename or "SEQUOIA" in filename:
        cgeom = "/SNS/SEQ/2009_2_17_CAL/calibrations/SEQ_cgeom_20090302.txt"
    else:
        raise RuntimeError("Cannot get corner geometry file")

    # Get corner geometry
    infile = open(hlr_utils.fix_filename(cgeom), "r")

    # Per-pixel angle information parsed from the geometry file (populated
    # by code beyond this excerpt)
    angle_info = {}
Example #43
0
def add_files_dm(filelist, **kwargs):
    """
    This function takes a list of U{NeXus<www.nexusformat.org>} files and
    various keyword arguments and returns a data C{SOM} and a monitor C{SOM}
    that is the sum of all the data from the specified files. B{It is assumed
    that the files contain similar data as only crude cross-checks will be
    made. You have been warned.}

    @param filelist: A list containing the names of the files to sum
    @type filelist: C{list}
    
    @param kwargs: A list of keyword arguments that the function accepts:
    
    @keyword SO_Axis: This is the name of the main axis to read from the NeXus
                      file
    @type SO_Axis: C{string}
    
    @keyword Data_Paths: This contains the data paths and signals for the
                         requested detector banks
    @type Data_Paths: C{tuple} of C{tuple}s

    @keyword Mon_Paths: This contains the data paths and signals for the
                        requested monitor banks
    @type Mon_Paths: C{tuple} of C{tuple}s    
    
    @keyword Signal_ROI: This is the name of a file that contains a list of
                         pixel IDs that will be read from the data file and
                         stored as a signal C{SOM}
    @type Signal_ROI: C{string}

    @keyword Signal_MASK: This is the name of a file that contains a list of
                         pixel IDs that will be read from the data file and
                         stored as a signal C{SOM}
    @type Signal_MASK: C{string}

    @keyword dataset_type: The practical name of the dataset being processed.
                           The default value is I{data}.
    @type dataset_type: C{string}

    @keyword dataset_cwp: A set of chopper phase corrections for the dataset.
                          This will instruct the function to shift the TOF
                          axes of mulitple datasets and perform rebinning. The
                          TOF axis for the first dataset is the one that all
                          other datasets will be rebinned to.
    @type dataset_cwp: C{list} of C{float}s

    @keyword Verbose: This is a flag to turn on print statments. The default is
                      I{False}.
    @type Verbose: C{boolean}
    
    @keyword Timer: This is an SNS Timer object used for showing the
                    performance timing in the function.
    @type Timer: C{sns_timing.Timer}


    @return: Signal C{SOM.SOM} and monitor C{SOM.SOM}
    @rtype: C{tuple}

    
    @raise SystemExit: If any file cannot be read
    @raise RuntimeError: If both a ROI and MASK file are specified
    """
    import sys

    import common_lib
    import DST
    
    # Parse keywords
    try:
        so_axis = kwargs["SO_Axis"]
    except KeyError:
        so_axis = "time_of_flight"
    
    try:
        data_paths = kwargs["Data_Paths"]
    except KeyError:
        data_paths = None

    try:
        mon_paths = kwargs["Mon_Paths"]
    except KeyError:
        mon_paths = None        

    try:
        signal_roi = kwargs["Signal_ROI"]
    except KeyError:
        signal_roi = None

    try:
        signal_mask = kwargs["Signal_MASK"]
    except KeyError:
        signal_mask = None         

    try:
        dataset_type = kwargs["dataset_type"]
    except KeyError:
        dataset_type = "data"

    try:
        verbose = kwargs["Verbose"]
    except KeyError:
        verbose = False

    try:
        timer = kwargs["Timer"]
    except KeyError:
        timer = None

    dataset_cwp = kwargs.get("dataset_cwp")

    if signal_roi is not None and signal_mask is not None:
        raise RuntimeError("Cannot specify both ROI and MASK file! Please "\
                           +"choose!")

    dst_type = "application/x-NeXus"
    counter = 0

    for filename in filelist:
        if verbose:
            print "File:", filename
            if dataset_cwp is not None:
                print "TOF Offset:", dataset_cwp[counter]

        if dataset_cwp is not None:
            cwp = dataset_cwp[counter]
        else:
            cwp = None

        try:
            data_dst = DST.getInstance(dst_type, filename)
        except SystemError:
            print "ERROR: Failed to data read file %s" % filename
            sys.exit(-1)

        if timer is not None:
            timer.getTime(msg="After parsing file")

        if verbose:
            print "Reading data file %d" % counter

        if counter == 0:
            d_som1 = data_dst.getSOM(data_paths, so_axis, roi_file=signal_roi,
                                     mask_file=signal_mask, tof_offset=cwp)
            d_som1.rekeyNxPars(dataset_type)

            if verbose:
                print "# Signal SO:", len(d_som1)
                try:
                    print "# TOF:", len(d_som1[0])
                    print "# TOF Axis:", len(d_som1[0].axis[0].val)
                except IndexError:
                    # No data is present so say so again
                    print "information is unavailable since no data "\
                          +"present. Exiting."
                    sys.exit(0)

            if timer is not None:
                timer.getTime(msg="After reading data")

            if mon_paths is not None:
                if verbose:
                    print "Reading monitor %d" % counter

                if counter == 0:
                    m_som1 = data_dst.getSOM(mon_paths, so_axis,
                                             tof_offset=cwp)
                    m_som1.rekeyNxPars(dataset_type)

                if verbose:
                    print "# Monitor SO:", len(m_som1)
                    print "# TOF:", len(m_som1[0])
                    print "# TOF Axis:", len(m_som1[0].axis[0].val)

                if timer is not None:
                    timer.getTime(msg="After reading monitor data")
            else:
                m_som1 = None
        else:
            d_som_t0 = data_dst.getSOM(data_paths, so_axis,
                                       roi_file=signal_roi,
                                       mask_file=signal_mask, tof_offset=cwp)
            d_som_t0.rekeyNxPars(dataset_type)
            
            if timer is not None:
                timer.getTime(msg="After reading data")

            if dataset_cwp is not None:
                d_som_t = common_lib.rebin_axis_1D_frac(d_som_t0,
                                                        d_som1[0].axis[0].val)
                del d_som_t0
            else:
                d_som_t = d_som_t0

            d_som1 = common_lib.add_ncerr(d_som_t, d_som1, add_nxpars=True)

            if timer is not None:
                timer.getTime(msg="After adding data spectra")

            del d_som_t

            if timer is not None:
                timer.getTime(msg="After data SOM deletion")

            if mon_paths is not None:
                m_som_t0 = data_dst.getSOM(mon_paths, so_axis, tof_offset=cwp)
                m_som_t0.rekeyNxPars(dataset_type)
                
                if timer is not None:
                    timer.getTime(msg="After reading monitor data")

                if dataset_cwp is not None:
                    m_som_t = common_lib.rebin_axis_1D_frac(m_som_t0,
                                                        m_som1[0].axis[0].val)
                    del m_som_t0
                else:
                    m_som_t = m_som_t0

                m_som1 = common_lib.add_ncerr(m_som_t, m_som1, add_nxpars=True)

                if timer is not None:
                    timer.getTime(msg="After adding monitor spectra")

                del m_som_t            

                if timer is not None:
                    timer.getTime(msg="After monitor SOM deletion")
                
        data_dst.release_resource()
        del data_dst
        counter += 1

        if timer is not None:
            timer.getTime(msg="After resource release and DST deletion")

        som_key_parts = [dataset_type, "filename"]
        som_key = "-".join(som_key_parts)
        
        d_som1.attr_list[som_key] = filelist
        if m_som1 is not None:
            m_som1.attr_list[som_key] = filelist

    return (d_som1, m_som1)