# Example 1
def load_pixel_averaged_from_raw(h5_file, verbose=True, loadverbose=True):
    """
    Creates a new group FF_Avg where the FF_Raw dataset is averaged together.

    This is more useful as pixel-wise averages are more relevant in FF-processing.

    The resulting Dataset is (num_rows * num_cols, pnts_per_avg).

    :param h5_file: H5 File to be examined. File typically set as h5_file = hdf.file
        hdf = px.ioHDF5(h5_path), h5_path = path to disk
    :type h5_file: h5py File

    :param verbose: Whether to print the resulting file tree and dataset shape
    :type verbose: bool, optional

    :param loadverbose: Whether to print simple "Row X" progress statements for feedback
    :type loadverbose: bool, optional

    :returns: The new averaged Dataset
    :rtype: Dataset
    """
    # NOTE(review): h5_file is documented as an open h5py File; h5py.File() is
    # still called on it here (as in the original) — confirm this round-trip is
    # intended rather than using h5_file directly.
    hdf = h5py.File(h5_file)
    h5_main = usid.hdf_utils.find_dataset(hdf.file, 'FF_Raw')[0]

    # If FF_Avg already exists, create_group raises; fall back to an indexed
    # sibling group (FF_Avg_000, ...) instead of overwriting.
    try:
        ff_avg_group = h5_main.parent.create_group('FF_Avg')
    except Exception:
        ff_avg_group = usid.hdf_utils.create_indexed_group(h5_main.parent, 'FF_Avg')

    parm_dict = get_attributes(h5_main.parent)

    num_rows = parm_dict['num_rows']
    num_cols = parm_dict['num_cols']
    pnts_per_avg = parm_dict['pnts_per_avg']
    pnts_per_line = parm_dict['pnts_per_line']

    # After averaging there is only 1 point per pixel, so a line is num_cols points.
    parm_dict['pnts_per_pixel'] = 1
    parm_dict['pnts_per_line'] = num_cols

    # Set up the position vectors for the data
    pos_desc = [Dimension('X', 'm', np.linspace(0, parm_dict['FastScanSize'], num_cols)),
                Dimension('Y', 'm', np.linspace(0, parm_dict['SlowScanSize'], num_rows))]

    spec_desc = [Dimension('Time', 's', np.linspace(0, parm_dict['total_time'], pnts_per_avg))]

    # Copy all (updated) parameters onto the new group as attributes.
    for p in parm_dict:
        ff_avg_group.attrs[p] = parm_dict[p]
    ff_avg_group.attrs['pnts_per_line'] = num_cols  # to change number of pnts in a line
    ff_avg_group.attrs['pnts_per_pixel'] = 1  # to change number of pnts in a pixel

    h5_avg = usid.hdf_utils.write_main_dataset(ff_avg_group,  # parent HDF5 group
                                               (num_rows * num_cols, pnts_per_avg),  # shape of Main dataset
                                               'FF_Avg',  # Name of main dataset
                                               'Deflection',  # Physical quantity contained in Main dataset
                                               'V',  # Units for the physical quantity
                                               pos_desc,  # Position dimensions
                                               spec_desc,  # Spectroscopic dimensions
                                               dtype=np.float32,  # data type / precision
                                               compression='gzip',
                                               main_dset_attrs=parm_dict)

    # Uses get_line to extract line. Averages and returns to the Dataset FF_Avg.
    # We can operate on the dataset array directly; get_line is used for future-proofing
    # if we want to add additional operations (such as creating an Image class).
    for i in range(num_rows):

        if loadverbose:
            print('#### Row:', i, '####')

        _ll = get_utils.get_line(h5_main, pnts=pnts_per_line, line_num=i, array_form=False, avg=False)
        _ll = _ll.pixel_wise_avg()

        h5_avg[i * num_cols:(i + 1) * num_cols, :] = _ll[:, :]

    if verbose:
        usid.hdf_utils.print_tree(hdf.file, rel_paths=True)
        h5_avg = usid.hdf_utils.find_dataset(hdf.file, 'FF_Avg')[0]

        print('H5_avg of size:', h5_avg.shape)

    hdf.flush()

    return h5_avg
# Example 2
def process(h5_file,
            ds='FF_Raw',
            ref='',
            clear_filter=False,
            verbose=True,
            liveplots=True,
            **kwargs):
    """
    Processes the FF_Raw dataset in the HDF5 file.

    This then saves within the h5 file in FF_Group-processed.

    The dataset to process is located in this priority order:
        * a relative path specified by ref, e.g. '/FF_Group/FF_Avg/FF_Avg'
        * a dataset specified by ds, returning the last found, e.g. 'FF_Raw'
        * FF_Group/FF_Raw found via find_FF

    Typical usage:
    >> import pycroscopy as px
    >> h5_file = px.io.HDFwriter('path_to_h5_file.h5').file
    >> from ffta import analyze_h5
    >> tfp, shift, inst_freq = analyze_h5.process(h5_file, ref='/FF_Group/FF_Avg/FF_Avg')

    Parameters
    ----------
    h5_file : h5Py file or str
        Path to a specific h5 file on the disk or an hdf.file

    ds : str, optional
        The Dataset to search for in the file

    ref : str, optional
        A path to a specific dataset in the file,
        e.g. h5_file['/FF_Group/FF_Avg/FF_Avg']

    clear_filter : bool, optional
        For data already filtered, calls Line's clear_filter function to
        skip FIR/windowing steps

    verbose : bool, optional
        Whether to write data to the command line

    liveplots : bool
        Displaying can sometimes cause the window to pop in front of other
        active windows in Matplotlib. This disables it, with an obvious
        drawback of no feedback.

    **kwargs
        Overrides for entries in the dataset's parameters; each key must
        already exist in the parameters (a missing key raises KeyError).

    Returns
    -------
    tfp : ndarray
        time-to-first-peak image array
    shift : ndarray
        frequency shift image array
    inst_freq : ndarray (2D)
        instantaneous frequency array, an N x p array of N = rows*cols points
        where p = points_per_signal (e.g. 16000 for 1.6 ms @ 10 MHz sampling)
    h5_if : USIDataset of h5_if (instantaneous frequency)
    """
    #    logging.basicConfig(filename='error.log', level=logging.INFO)
    # Accept a path string or an already-open h5py File/Dataset; the type is
    # sniffed by name so both h5py and pycroscopy wrapper types pass.
    ftype = str(type(h5_file))

    if ('str' in ftype) or ('File' in ftype) or ('Dataset' in ftype):

        h5_file = px.io.HDFwriter(h5_file).file

    else:

        raise TypeError('Must be string path, e.g. E:\Test.h5')

    # Looks for a ref first before searching for ds, h5_ds is group to process
    if any(ref):  # any() on a string: True iff ref is non-empty
        h5_ds = h5_file[ref]
        parameters = get_utils.get_params(h5_ds)

    elif ds != 'FF_Raw':
        h5_ds = usid.hdf_utils.find_dataset(h5_file, ds)[-1]
        parameters = get_utils.get_params(h5_ds)

    else:
        h5_ds, parameters = find_FF(h5_file)

    if isinstance(h5_ds, h5py.Dataset):
        h5_gp = h5_ds.parent  # NOTE(review): h5_gp is never used below

    # Initialize file and read parameters
    num_cols = parameters['num_cols']
    num_rows = parameters['num_rows']
    pnts_per_pixel = parameters['pnts_per_pixel']  # NOTE(review): unused below
    pnts_per_avg = parameters['pnts_per_avg']

    # Apply user overrides to the analysis parameters (keys must pre-exist).
    for key, value in kwargs.items():
        _temp = parameters[key]
        parameters[key] = value
        if verbose:
            print('Changing', key, 'from', _temp, 'to', value)

    if verbose:
        print('Recombination: ', parameters['recombination'])
        print('ROI: ', parameters['roi'])

    # Initialize arrays.
    tfp = np.zeros([num_rows, num_cols])
    shift = np.zeros([num_rows, num_cols])
    inst_freq = np.zeros([num_rows * num_cols, pnts_per_avg])

    # Initialize plotting.

    plt.ion()
    fig, a = plt.subplots(nrows=2, ncols=2, figsize=(13, 6))

    tfp_ax = a[0][1]
    shift_ax = a[1][1]

    img_length = parameters['FastScanSize']
    img_height = parameters['SlowScanSize']
    # NOTE: rebinding kwargs here deliberately discards the user overrides dict
    # (already consumed above) and reuses the name for plot_map keyword args.
    kwargs = {
        'origin': 'lower',
        'x_vec': img_length * 1e6,
        'y_vec': img_height * 1e6,
        'num_ticks': 5,
        'stdevs': 3
    }

    # Best-effort topography panel: skipped silently if the file has no
    # '/height/Raw_Data' channel (hence the broad except).
    try:
        ht = h5_file['/height/Raw_Data'][:, 0]
        ht = np.reshape(ht, [num_cols, num_rows]).transpose()
        ht_ax = a[0][0]
        ht_image, cbar = usid.viz.plot_utils.plot_map(ht_ax,
                                                      ht * 1e9,
                                                      cmap='gray',
                                                      **kwargs)
        cbar.set_label('Height (nm)', rotation=270, labelpad=16)
    except:
        pass

    tfp_ax.set_title('tFP Image')
    shift_ax.set_title('Shift Image')

    # Initial (all-zero) maps; these image handles are rebound on each live
    # update inside the loop below.
    tfp_image, cbar_tfp = usid.viz.plot_utils.plot_map(tfp_ax,
                                                       tfp * 1e6,
                                                       cmap='inferno',
                                                       show_cbar=False,
                                                       **kwargs)
    shift_image, cbar_sh = usid.viz.plot_utils.plot_map(shift_ax,
                                                        shift,
                                                        cmap='inferno',
                                                        show_cbar=False,
                                                        **kwargs)
    text = tfp_ax.text(num_cols / 2, num_rows + 3, '')
    plt.show()

    print('Analyzing with roi of', parameters['roi'])

    # Load every file in the file list one by one.
    for i in range(num_rows):

        line_inst = get_utils.get_line(h5_ds, i, params=parameters)

        if clear_filter:
            line_inst.clear_filter_flags()

        # Per-line analysis: time-to-first-peak, frequency shift, inst. frequency.
        _tfp, _shf, _if = line_inst.analyze()
        tfp[i, :] = _tfp.T
        shift[i, :] = _shf.T
        inst_freq[i * num_cols:(i + 1) * num_cols, :] = _if.T

        if liveplots:
            # Redraw both maps and rescale color limits to the nonzero
            # (already-processed) pixels only.
            tfp_image, _ = usid.viz.plot_utils.plot_map(tfp_ax,
                                                        tfp * 1e6,
                                                        cmap='inferno',
                                                        show_cbar=False,
                                                        **kwargs)
            shift_image, _ = usid.viz.plot_utils.plot_map(shift_ax,
                                                          shift,
                                                          cmap='inferno',
                                                          show_cbar=False,
                                                          **kwargs)

            tfp_sc = tfp[tfp.nonzero()] * 1e6
            tfp_image.set_clim(vmin=tfp_sc.min(), vmax=tfp_sc.max())

            shift_sc = shift[shift.nonzero()]
            shift_image.set_clim(vmin=shift_sc.min(), vmax=shift_sc.max())

            tfpmean = 1e6 * tfp[i, :].mean()
            tfpstd = 1e6 * tfp[i, :].std()

            if verbose:
                string = ("Line {0:.0f}, average tFP (us) ="
                          " {1:.2f} +/- {2:.2f}".format(
                              i + 1, tfpmean, tfpstd))
                print(string)

                # Replace the status text above the tFP axis with this line's stats.
                text.remove()
                text = tfp_ax.text((num_cols - len(string)) / 2, num_rows + 4,
                                   string)

            # plt.draw()
            plt.pause(0.0001)  # let the GUI event loop repaint

        del line_inst  # Delete the instance to open up memory.

    # Final render with colorbars now that all rows are processed.
    tfp_image, cbar_tfp = usid.viz.plot_utils.plot_map(tfp_ax,
                                                       tfp * 1e6,
                                                       cmap='inferno',
                                                       **kwargs)
    cbar_tfp.set_label('Time (us)', rotation=270, labelpad=16)
    shift_image, cbar_sh = usid.viz.plot_utils.plot_map(shift_ax,
                                                        shift,
                                                        cmap='inferno',
                                                        **kwargs)
    cbar_sh.set_label('Frequency Shift (Hz)', rotation=270, labelpad=16)
    text = tfp_ax.text(num_cols / 2, num_rows + 3, '')

    plt.show()

    # Persist results back into the H5 file next to the source dataset.
    h5_if = save_IF(h5_ds.parent, inst_freq, parameters)
    _, _, tfp_fixed = save_ht_outs(h5_if.parent, tfp, shift)

    # save_CSV(h5_path, tfp, shift, tfp_fixed, append=ds)

    if verbose:
        print(
            'Please remember to close the H5 file explicitly when you are done to retain these data',
            'e.g.:', 'h5_if.file.close()',
            '...and then reopen the file as needed.')

    return tfp, shift, inst_freq, h5_if
# Example 3
def hdf_commands(h5_path, ds='FF_Raw'):
	"""
	Creates a bunch of typical workspace HDF5 variables for scripting use.

	This prints the valid commands to the workspace. Then just highlight and
		copy-paste to execute.

	Parameters
	----------
	h5_path : str
		Path to the HDF5 file on disk

	ds : str, optional
		The dataset to search for and set as h5_main.
	"""
	# Fail fast on bad input before any file access is attempted.
	if not isinstance(h5_path, str):
		raise TypeError('Pass a file path (string), not an H5 file')

	commands = ['***Copy-paste all commands below this line, then hit ENTER***',
				'import h5py']

	# Each try-block below is a dry run: the local assignment verifies that the
	# suggested command actually works on this file before it is printed.  Many
	# of these locals are otherwise unused; a few are reused by later probes
	# (h5_avg -> parameters, h5_if -> get_line/get_pixel).
	# "except Exception" (not a bare except) so KeyboardInterrupt/SystemExit
	# are not silently swallowed while probing.
	try:
		hdf = h5py.File(h5_path, 'r+')
		commands.append("hdf = h5py.File(h5_path, 'r+')")
	except Exception:
		pass

	try:
		h5_file = hdf.file
		commands.append("h5_file = hdf.file")
	except Exception:
		pass

	try:
		h5_main = usid.hdf_utils.find_dataset(hdf.file, ds)[0]
		commands.append("h5_main = usid.hdf_utils.find_dataset(hdf.file, '" + ds + "')[0]")
	except Exception:
		pass

	# Instantaneous-frequency dataset under either capitalization.
	try:
		h5_if = usid.hdf_utils.find_dataset(hdf.file, 'inst_freq')[-1]
		commands.append("h5_if = usid.hdf_utils.find_dataset(hdf.file, 'inst_freq')[-1]")
	except Exception:
		pass

	try:
		h5_if = usid.hdf_utils.find_dataset(hdf.file, 'Inst_Freq')[-1]
		commands.append("h5_if = usid.hdf_utils.find_dataset(hdf.file, 'Inst_Freq')[-1]")
	except Exception:
		pass

	try:
		h5_tfp = usid.hdf_utils.find_dataset(hdf.file, 'tfp')[-1]
		commands.append("h5_tfp= usid.hdf_utils.find_dataset(hdf.file, 'tfp')[-1]")
	except Exception:
		pass

	try:
		h5_shift = usid.hdf_utils.find_dataset(hdf.file, 'shift')[-1]
		commands.append("h5_shift= usid.hdf_utils.find_dataset(hdf.file, 'shift')[-1]")
	except Exception:
		pass

	try:
		h5_avg = usid.hdf_utils.find_dataset(hdf.file, 'FF_Avg')[-1]
		commands.append("h5_avg = usid.hdf_utils.find_dataset(hdf.file, 'FF_Avg')[-1]")
	except Exception:
		pass

	try:
		h5_filt = usid.hdf_utils.find_dataset(hdf.file, 'Filtered_Data')[-1]
		commands.append("h5_filt = usid.hdf_utils.find_dataset(hdf.file, 'Filtered_Data')[-1]")
	except Exception:
		pass

	try:
		h5_rb = usid.hdf_utils.find_dataset(hdf.file, 'Rebuilt_Data')[-1]
		commands.append("h5_rb = usid.hdf_utils.find_dataset(hdf.file, 'Rebuilt_Data')[-1]")
	except Exception:
		pass

	# Depends on h5_avg found above; NameError is caught if it was not.
	try:
		parameters = usid.hdf_utils.get_attributes(h5_avg)
		commands.append("parameters = usid.hdf_utils.get_attributes(h5_avg)")
	except Exception:
		pass

	# These two depend on h5_if found above.
	try:
		h5_ll = get_utils.get_line(h5_if, line_num=0)
		commands.append("h5_ll = ffta.load.get_utils.get_line(h5_if, line_num=0)")
	except Exception:
		pass

	try:
		h5_px = get_utils.get_pixel(h5_if, rc=[0, 0])
		commands.append("h5_px = ffta.load.get_utils.get_pixel(h5_if, rc=[0,0])")
	except Exception:
		pass

	# The printed command returns the parent group of the SVD 'U' dataset.
	try:
		h5_svd = usid.hdf_utils.find_dataset(hdf.file, 'U')[-1]
		commands.append("h5_svd = usid.hdf_utils.find_dataset(hdf.file, 'U')[-1].parent")
	except Exception:
		pass

	try:
		h5_cpd = usid.hdf_utils.find_dataset(hdf.file, 'cpd')[-1]
		commands.append("h5_cpd = usid.hdf_utils.find_dataset(hdf.file, 'cpd')[-1]")
	except Exception:
		pass

	try:
		h5_ytime = usid.hdf_utils.find_dataset(hdf.file, 'y_time')[-1]
		commands.append("h5_ytime = usid.hdf_utils.find_dataset(hdf.file, 'y_time')[-1]")
	except Exception:
		pass

	try:
		h5_Y = usid.hdf_utils.find_dataset(hdf.file, 'Y')[-1]
		commands.append("h5_Y = usid.hdf_utils.find_dataset(hdf.file, 'Y')[-1]")
	except Exception:
		pass

	for i in commands:
		print(i)

	return