Example 1
def command_line_interface():
    '''standard command-line interface'''
    cmd_args = get_user_options()

    output_filename = cmd_args.output_file or cmd_args.hdf5_file
    s_num_bins = str(cmd_args.num_bins)

    needs_calc = {}
    pvwatch.logMessage("Reading USAXS FlyScan data file: " +
                       cmd_args.hdf5_file)
    scan = UsaxsFlyScan(cmd_args.hdf5_file)

    # 2015-06-08,prj: no need for archives now
    #if cmd_args.no_archive:
    #    print '  skipping check for archived original file'
    #else:
    #    afile = scan.make_archive()
    #    if afile is not None:
    #        print '  archived original file to ' + afile

    pvwatch.logMessage('  checking for previously-saved R(Q)')
    scan.read_reduced()
    needs_calc['full'] = not scan.has_reduced('full')
    if cmd_args.recompute_full:
        needs_calc['full'] = True
    needs_calc[s_num_bins] = not scan.has_reduced(s_num_bins)
    if cmd_args.recompute_rebinned:
        needs_calc[s_num_bins] = True
    # needs_calc['250'] = True    # FIXME: developer only

    if needs_calc['full']:
        pvwatch.logMessage('  reducing FlyScan to R(Q)')
        scan.reduce()
        if 'full' not in scan.reduced:
            pvwatch.logMessage(
                '  no reduced R(Q) was computed; not saving to ' +
                output_filename)
            return
        pvwatch.logMessage('  saving reduced R(Q) to ' + output_filename)
        scan.save(output_filename, 'full')
        needs_calc[s_num_bins] = True
    if needs_calc[s_num_bins]:
        pvwatch.logMessage('  rebinning R(Q) (from %d) to %d points' %
                           (scan.reduced['full']['Q'].size, cmd_args.num_bins))
        scan.rebin(cmd_args.num_bins)
        pvwatch.logMessage('  saving rebinned R(Q) to ' + output_filename)
        scan.save(output_filename, s_num_bins)
    return scan
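
Example 1 above is the module's command-line entry point. A minimal sketch of how it might be wired up when the module is run as a script; the __main__ guard is an assumption, since the surrounding module (and its get_user_options() argument parsing) is not shown here:

# Hypothetical entry-point wiring; the real module may organize this differently.
if __name__ == '__main__':
    scan = command_line_interface()
    if scan is not None:
        # scan.reduced holds the 'full' and rebinned R(Q) data sets (see Example 3)
        print('reduced data sets:', sorted(scan.reduced.keys()))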
Example 2
def reduce_area_detector_data(hdf5_file,
                              num_bins,
                              recompute_full=False,
                              recompute_rebinned=False,
                              output_filename=None):
    '''
    reduce areaDetector image data to R(Q)

    :param str hdf5_file: name of HDF5 file with AD image data
    :param int num_bins: number of bins in rebinned data set
    :param bool recompute_full: set True to force recompute,
           even if reduced data already in data file (default: False)
    :param bool recompute_rebinned: set True to force recompute,
           even if reduced data already in data file (default: False)
    :param str output_filename: name of file to write reduced data;
           if None, use hdf5_file (default: None)
    '''
    needs_calc = {}
    pvwatch.logMessage( "Area Detector data file: " + hdf5_file )
    scan = AD_ScatteringImage(hdf5_file)   # initialize the object

    s_num_bins = str(num_bins)
    output_filename = output_filename or hdf5_file

    pvwatch.logMessage('  checking for previously-saved R(Q)')
    scan.read_reduced()

    needs_calc['full'] = not scan.has_reduced('full')
    if recompute_full:
        needs_calc['full'] = True
    needs_calc[s_num_bins] = not scan.has_reduced(s_num_bins)
    if recompute_rebinned:
        needs_calc[s_num_bins] = True

    if needs_calc['full']:
        pvwatch.logMessage('  reducing Area Detector image to R(Q)')
        scan.reduce()
        pvwatch.logMessage('  saving reduced R(Q) to ' + output_filename)
        scan.save(output_filename, 'full')
        needs_calc[s_num_bins] = True

    if needs_calc[s_num_bins]:
        msg = '  rebinning R(Q) (from %d) to %d points'
        msg = msg % (scan.reduced['full']['Q'].size, num_bins)
        pvwatch.logMessage(msg)
        scan.rebin(num_bins)
        pvwatch.logMessage('  saving rebinned R(Q) to ' + output_filename)
        scan.save(output_filename, s_num_bins)

    return scan
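
A minimal usage sketch for reduce_area_detector_data(); the file name and bin count below are hypothetical, chosen only to illustrate the call:

# Hypothetical call: the file name and bin count are made up for illustration.
scan = reduce_area_detector_data(
    'sample_0001.hdf',        # HDF5 file with areaDetector image data
    num_bins=250,
    recompute_rebinned=True,  # force a fresh 250-point rebin even if one is saved
)
# the full and rebinned R(Q) are then available as in Example 3:
Q_250 = scan.reduced['250']['Q']
R_250 = scan.reduced['250']['R']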
Example 3
    scan = reduceFlyData.command_line_interface()

    plotfile = os.path.join(
        path, 'test_reduceFly__' + os.path.basename(hdf5_file) + '_.png')

    plot_mpl.spec_plot(
        scan.reduced['full']['Q'],
        scan.reduced['full']['R'],
        plotfile,
        xtitle='Q',
        ytitle='R',
        ylog=True,
    )

    plotfile = os.path.join(
        path, 'test_reduceFly_USAXS_' + os.path.basename(hdf5_file) + '_.png')
    ds_full = plot_mpl.Plottable_USAXS_Dataset()
    ds_full.label = 'full data'
    ds_full.Q = scan.reduced['full']['Q']
    ds_full.I = scan.reduced['full']['R']

    ds_250 = plot_mpl.Plottable_USAXS_Dataset()
    ds_250.label = 'rebinned (250) data'
    ds_250.Q = scan.reduced['250']['Q']
    ds_250.I = scan.reduced['250']['R']

    pvwatch.logMessage('  plotting to ' + plotfile)

    plot_mpl.livedata_plot([ds_full, ds_250], plotfile,
                           'test: ' + os.path.basename(hdf5_file))
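
Example 3 relies on the beamline's plot_mpl module. For readers without that package, a rough stand-in using plain matplotlib gives an idea of the full-versus-rebinned comparison plot; this is only an approximation, not plot_mpl's implementation, and the log-scale choice simply mirrors the ylog=True option used with spec_plot above:

# Illustrative stand-in only; plot_mpl.livedata_plot() is the real plotting call.
import matplotlib
matplotlib.use('Agg')              # render to a file, no display needed
import matplotlib.pyplot as plt

def quick_rq_plot(scan, plotfile, title):
    '''plot full and rebinned R(Q) from a reduced scan to a PNG file'''
    fig, ax = plt.subplots()
    ax.plot(scan.reduced['full']['Q'], scan.reduced['full']['R'], label='full data')
    ax.plot(scan.reduced['250']['Q'], scan.reduced['250']['R'],
            'o', markersize=3, label='rebinned (250) data')
    ax.set_yscale('log')           # same choice as ylog=True in spec_plot above
    ax.set_xlabel('Q')
    ax.set_ylabel('R')
    ax.set_title(title)
    ax.legend()
    fig.savefig(plotfile)
    plt.close(fig)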
Example 4
    def save(self, hfile=None, key=None):
        '''
        save the reduced data group to an HDF5 file, return filename or None if not written

        :param str hfile: output HDF5 file name (default: input HDF5 file)
        :param str key: name of reduced data set (default: nothing will be saved)

        By default, save to the input HDF5 file.
        To override this, specify the output HDF5 file name when calling this method.

        * If the file exists, this will not overwrite any input data.
        * Full, reduced :math:`R(Q)` goes into NXdata group::

            /entry/areaDetector_reduced_full

        * Any previous full, reduced :math:`R(Q)` will be replaced.

        * It may replace the rebinned, reduced :math:`R(Q)`
          if a NXdata group of the same number of bins exists.
        * Rebinned, reduced :math:`R(Q)` goes into NXdata group::

              /entry/areaDetector_reduced_<N>

          where ``<N>`` is the number of bins, such as (for 500 bins)::

              /entry/areaDetector_reduced_500

        :see: http://download.nexusformat.org/doc/html/classes/base_classes/NXentry.html
        :see: http://download.nexusformat.org/doc/html/classes/base_classes/NXdata.html
        '''
        key = str(key)
        if key not in self.reduced:
            return
        nxname = 'areaDetector_reduced_' + key
        hfile = hfile or self.hdf5_file_name
        ds = self.reduced[key]
        try:
            hdf = h5py.File(hfile, 'a')
        except IOError as _exc:
            # FIXME: some h5py problem in <h5py>/_hl/files.py, line 101
            # this fails: fid = h5f.open(name, h5f.ACC_RDWR, fapl=fapl)
            # with IOError that is improperly caught on next and then:
            # fid = h5f.create(name, h5f.ACC_EXCL, fapl=fapl, fcpl=fcpl) fails with IOError
            # since the second call has "name" with all lower case
            #
            # real problem is that these HDF5 files have the wrong uid/gid, as set by the Pilatus computer
            # TODO: fix each Pilatus and this problem will go away
            # TODO: change uid/gid on all the acquired HDF5 files (*.h5, *.hdf) under usaxscontrol:/share1/USAXS_data/2*
            # Files should be owned by usaxs:usaxs (1810:2026), but are owned by tomo2:usaxs (500:2026) as seen by usaxs@usaxscontrol
            # not enough to change the "umask" on the det@dec1122 computer, what else will fix this?
            pvwatch.logMessage( "Problem writing reduced data back to file: " + hfile )
            return
        if 'default' not in hdf.attrs:
            hdf.attrs['default'] = 'entry'
        nxentry = eznx.openGroup(hdf, 'entry', 'NXentry')
        if 'default' not in nxentry.attrs:
            nxentry.attrs['default'] = nxname
        nxdata = eznx.openGroup(nxentry,
                                nxname,
                                'NXdata',
                                signal='R',
                                axes='Q',
                                Q_indices=0,
                                timestamp=calc.iso8601_datetime(),
                                )
        for dsname in sorted(ds.keys()):   # avoid shadowing the ``key`` argument
            try:
                _ds = eznx.write_dataset(nxdata, dsname, ds[dsname])
                if dsname in self.units:
                    eznx.addAttributes(_ds, units=self.units[dsname])
            except RuntimeError:
                pass        # TODO: reporting
        hdf.close()
        return hfile
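
Given the NXdata layout documented in the save() docstring, reading the reduced data back is straightforward with h5py. A small read-back sketch; the file name is hypothetical and the group names follow the /entry/areaDetector_reduced_<N> pattern described above:

import h5py

# Hypothetical file name; group paths follow the docstring's layout.
with h5py.File('sample_0001.hdf', 'r') as hdf:
    nxdata = hdf['/entry/areaDetector_reduced_full']
    Q = nxdata['Q'][()]            # axes='Q', as written by save()
    R = nxdata['R'][()]            # signal='R'
    # a rebinned group is present only after rebinning, e.g. to 250 points:
    rebinned = hdf.get('/entry/areaDetector_reduced_250')
    if rebinned is not None:
        Q_250, R_250 = rebinned['Q'][()], rebinned['R'][()]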