Example #1
def pivlab2hdf5(dirbase):
    parentdir = os.path.dirname(dirbase)
    # glob directories generated by a pivlab code
    datadirs = glob.glob(dirbase + '/*')
    for datadir in datadirs:
        print 'Processing %s' % datadir

        datafiles = glob.glob(datadir + '/*.txt')
        for i, datafile in enumerate(datafiles):
            if i % 100 == 0:
                print '%d / %d' % (i, len(datafiles))
            data = np.loadtxt(datafile, delimiter=',', skiprows=3)

            xx, yy = data[:, 0], data[:, 1]
            ux, uy = data[:, 2], data[:, 3]
            omega = data[:, 4]

            if i == 0:
                delta_y = np.diff(yy)[0]
                delta_x = delta_y

                ncols = int((np.max(xx) - np.min(xx)) / delta_x) + 1
                nrows = int((np.max(yy) - np.min(yy)) / delta_y) + 1
                shape_temp = (ncols, nrows)

                xgrid, ygrid = xx.reshape(shape_temp).T, yy.reshape(shape_temp).T

            ux_grid = ux.reshape(shape_temp).T
            uy_grid = uy.reshape(shape_temp).T
            omega_grid = omega.reshape(shape_temp).T

            if i == 0:
                uxdata = np.zeros((nrows, ncols, len(datafiles)))
                uydata = np.zeros((nrows, ncols, len(datafiles)))
                omegadata = np.zeros((nrows, ncols, len(datafiles)))
            uxdata[..., i] = ux_grid
            uydata[..., i] = uy_grid
            omegadata[..., i] = omega_grid

        savedata = {}
        savedata['x'] = xgrid
        savedata['y'] = ygrid
        savedata['ux'] = uxdata
        savedata['uy'] = uydata
        savedata['omega'] = omegadata

        hdf5path = parentdir + '/hdf5data/' + os.path.split(datadir)[1]
        rw.write_hdf5_dict(hdf5path, savedata)

    print '... Done'
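Example #1 passes a plain dict of NumPy arrays to rw.write_hdf5_dict together with an output path. A minimal sketch of what such a helper could look like, assuming an h5py backend and one dataset per key (the actual library.tools.rw_data implementation may differ):

import os
import h5py

def write_hdf5_dict_sketch(filepath, datadict):
    """Write each key/array pair of datadict as one HDF5 dataset.
    Illustrative stand-in for rw.write_hdf5_dict, not the real helper."""
    if not filepath.endswith('.h5'):
        filepath += '.h5'
    outdir = os.path.dirname(filepath)
    if outdir and not os.path.isdir(outdir):
        os.makedirs(outdir)
    with h5py.File(filepath, 'w') as f:
        for key, value in datadict.items():
            f.create_dataset(key, data=value)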
Example #2
                       subplot=122,
                       figsize=(16, 8))
graph.labelaxes(ax31, r'$U_x$ (px/frame)', 'Probability density')
graph.labelaxes(ax32, r'$U_y$ (px/frame)', 'Probability density')
fig3.tight_layout()

filename = 'lamb_oseen_vortex_ux_uy'
filepath = os.path.join(savedir, filename)
graph.save(filepath, fignum=1)

filename = 'lamb_oseen_vortex_omegaz'
filepath = os.path.join(savedir, filename)
graph.save(filepath, fignum=2)

filename = 'lamb_oseen_vortex_ux_uy_pdf'
filepath = os.path.join(savedir, filename)
graph.save(filepath, fignum=3)

# Output velocity/vorticity field in hdf5
data = {}
data['x'] = xgrid  # px
data['y'] = ygrid  # px
data['ux'] = ugrid  # px/frame
data['uy'] = vgrid  # px/frame
data['omega'] = omega  # 1/frame

hdf5dir = os.path.join(filedir, 'hdf5data/')
filename = dirname
rw.write_hdf5_dict(hdf5dir + filename, data)

graph.show()
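To inspect the saved field afterwards, the dict can be read back either with the repo's rw.read_hdf5 (as used in Example #3) or directly with h5py. A short sketch, assuming the keys written above and that the output file carries an .h5 extension (the extension is an assumption, not taken from the original code):

import h5py

with h5py.File(hdf5dir + filename + '.h5', 'r') as f:
    xgrid, ygrid = f['x'][...], f['y'][...]    # px
    ugrid, vgrid = f['ux'][...], f['uy'][...]  # px/frame
    omega = f['omega'][...]                    # 1/frame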
Example #3
    data['sigma_uy'] = sigmas_uy
    data['mu_ux_gauss'] = gauss_peaks_ux
    data['mu_uy_gauss'] = gauss_peaks_uy
    data['gamma_ux'] = gammas_ux
    data['gamma_uy'] = gammas_uy
    data['mu_ux_lorentz'] = lorentz_peaks_ux
    data['mu_uy_lorentz'] = lorentz_peaks_uy
    data['sigma_ux_err'] = sigmas_ux_err
    data['sigma_uy_err'] = sigmas_uy_err
    data['mu_ux_gauss_err'] = gauss_peaks_ux_err
    data['mu_uy_gauss_err'] = gauss_peaks_uy_err
    data['gamma_ux_err'] = gammas_ux_err
    data['gamma_uy_err'] = gammas_uy_err
    data['mu_ux_lorentz_err'] = lorentz_peaks_ux_err
    data['mu_uy_lorentz_err'] = lorentz_peaks_uy_err
    rw.write_hdf5_dict(datafilename, data)

else:
    print 'Noise fit results already exist!'
    data = rw.read_hdf5(datafilepath)
    iws, disps = np.array(data['inter_wid_width']), np.array(data['displacement'])
    sigmas_ux, sigmas_uy = np.array(data['sigma_ux']), np.array(data['sigma_uy'])
    gauss_peaks_ux, gauss_peaks_uy = np.array(data['mu_ux_gauss']), np.array(data['mu_uy_gauss'])
    gammas_ux, gammas_uy = np.array(data['gamma_ux']), np.array(data['gamma_uy'])
    lorentz_peaks_ux, lorentz_peaks_uy = np.array(data['mu_ux_lorentz']), np.array(data['mu_uy_lorentz'])
    sigmas_ux_err, sigmas_uy_err = np.array(data['sigma_ux_err']), np.array(data['sigma_uy_err'])
    gauss_peaks_ux_err, gauss_peaks_uy_err = np.array(data['mu_ux_gauss_err']), np.array(data['mu_uy_gauss_err'])
    gammas_ux_err, gammas_uy_err = np.array(data['gamma_ux_err']), np.array(data['gamma_uy_err'])
    lorentz_peaks_ux_err, lorentz_peaks_uy_err = np.array(data['mu_ux_lorentz_err']), np.array(data['mu_uy_lorentz_err'])


# Now make a heat map
iw_temp, disp_temp = np.linspace(0, 80, 81), np.linspace(0, 80, 81)
iw_grid, disp_grid = np.meshgrid(iw_temp, disp_temp)
sigma_ux_int = griddata(np.array(zip(iws, disps)), np.abs(np.array(sigmas_ux)), (iw_grid, disp_grid), method='nearest')
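griddata with method='nearest' assigns to each node of the regular (interrogation window, displacement) grid the value of the closest scattered measurement, so the resulting heat map has no holes even where a parameter combination was never measured. A self-contained toy version of the same call (the data and names below are illustrative, not from the original script):

import numpy as np
from scipy.interpolate import griddata

pts = np.random.uniform(0, 80, size=(50, 2))          # scattered (iw, disp) samples
vals = np.hypot(pts[:, 0], pts[:, 1])                  # some scalar measured at each sample
iw_temp, disp_temp = np.linspace(0, 80, 81), np.linspace(0, 80, 81)
iw_grid, disp_grid = np.meshgrid(iw_temp, disp_temp)   # regular target grid
vals_grid = griddata(pts, vals, (iw_grid, disp_grid), method='nearest')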
                label=r'$\bar{E}_{2D}=\frac{1}{2}(\bar{U_x}^2)$',
                option='scientific')
            graph.labelaxes(ax, 'X (px)', 'Y (px)')
            fig.tight_layout()
            filename = '/time_avg_energy_%s/im%05d' % (args.mode, i)
            graph.save(args.dir + filename, ext='png', close=True)

        import library.tools.rw_data as rw
        savedata = {}
        savedata['x'] = xx
        savedata['y'] = yy
        savedata['ux'] = grid_ux
        savedata['uy'] = grid_uy
        savedata['energy'] = grid_e
        filepath = args.dir + '/grid_data_%s' % args.mode
        rw.write_hdf5_dict(filepath, savedata)
        # plt.show()

else:
    # Load setup data and figure out how to merge data
    laser_pos = np.empty(ndata)
    x0s = np.empty(ndata)
    zz2_top, zz2_bottom = np.empty(ndata), np.empty(ndata)
    z0s = np.empty(ndata)
    dthetas = np.empty(ndata)

    # make a list of independent dictionaries; each has a distinct object id
    setups = [{} for _ in range(ndata)]

    for i in range(ndata):
                # print pos, deltax, pts[indices], i, j, k

                eei[j][i][k] = np.nanmean(energy_ravelled[indices])
                # print indices, eei[i][j][k]

                # break
                # distances, indices = tree.query(pos, k=3) # get 2 nearest neighbors
                # print distances

    import library.tools.rw_data as rw
    savedata = {}
    savedata['x'] = xxi
    savedata['y'] = yyi
    savedata['z'] = zzi
    savedata['energy'] = eei
    rw.write_hdf5_dict(dir + '/interp_data_1p0_test_deltaz_%.1f_-30' % deltaz,
                       savedata)

    ## Does not work.
    # from scipy.interpolate import LinearNDInterpolator
    # int_func = LinearNDInterpolator(pts, data)
    ## griddata does not work

    #
    # # width, height, depth = ux.shape
    #
    # # Read a sample setup file to extract scale
    # # Load setup file
    # setup_str = open(setup_files[0], 'rt').read()
    # setup = {}
    # dummy = {}
    # exec ("from math import *", dummy)
Example #6
                                                       180.)
    theta_rad = args.theta * np.pi / 180.
    slope = -(args.max - args.min) / np.sqrt((args.width * np.cos(theta_rad))**2 +
                                             (args.height * np.sin(theta_rad))**2)
    rgrid = xgrid * np.cos(theta_rad) + ygrid * np.sin(theta_rad)
    vel_mag = np.ones((args.height, args.width)) * args.max + slope * rgrid
    ugrid, vgrid = vel_mag * np.cos(theta_rad), vel_mag * np.sin(theta_rad)
    # ugrid = np.ones((args.height, args.width)) * args.mag * np.cos(args.theta * np.pi / 180.)
    # vgrid = np.ones((args.height, args.width)) * args.mag * np.sin(args.theta * np.pi / 180.)
    # print ugrid.shape, xgrid.shape

    vel = np.stack((ugrid, vgrid))
    omega = vec.curl(vel)

# # Debugging purpose
# fig1 = plt.figure(1)
# ax1 = fig1.add_subplot(111)
# ax1.quiver(xgrid, ygrid, ugrid, vgrid)
# plt.show()

data = {}
data['x'] = xgrid
data['y'] = ygrid
data['ux'] = ugrid
data['uy'] = vgrid
data['omega'] = omega  # for unidirectional flow, vorticity is zero
datapath = args.resultdir + filename
rw.write_hdf5_dict(datapath, data)
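Example #6 obtains the vorticity from vec.curl applied to the stacked (ugrid, vgrid) field. For a 2-D field on a regular grid the relevant out-of-plane component is omega_z = dv/dx - du/dy; a hedged sketch using only np.gradient (unit grid spacing by default, and vec.curl itself may use a different convention or sign):

import numpy as np

def curl_2d(ugrid, vgrid, dx=1.0, dy=1.0):
    """Out-of-plane vorticity omega_z = dv/dx - du/dy on a regular grid.
    Illustrative only; not necessarily how vec.curl is implemented."""
    dudy, dudx = np.gradient(ugrid, dy, dx)   # axis 0 = y (rows), axis 1 = x (cols)
    dvdy, dvdx = np.gradient(vgrid, dy, dx)
    return dvdx - dudy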
Example #7
                                     yy,
                                     n_bins,
                                     option=option)
    else:
        _, DLL = compute_struc_func(ux0,
                                    ux0_noise,
                                    xx,
                                    yy,
                                    n_bins,
                                    option=option)
    rr_scaled = rr / eta
    Dxx_scaled = DLL / ((epsilon * rr)**(2. / 3))
    fig, ax = graph.plot(rr_scaled, Dxx_scaled, label=labels[i], alpha=0.9)
    fig, ax = graph.scatter(rr_scaled, Dxx_scaled, alpha=0.9)

    data['rr_scaled_' + option] = rr_scaled
    data['Dxx_scaled_' + option] = Dxx_scaled
    data['rr_' + option] = rr
    data['Dxx_' + option] = DLL

graph.tosemilogx(ax)
ax.legend(loc=2)
graph.labelaxes(ax, r'$r/\eta$', r'$D_{xx}/(\epsilon r)^{2/3}$')
#graph.setaxes(ax, 1, 5000, -0.2, 4)
graph.axhband(ax, 2 * 0.85, 2 * 1.15, 1, 5000)
graph.save(savedir + 'jhtd_struc_func_scaled_white_noise_budget_test')
# graph.show()

datafile = 'jhtd_struc_func_scaled_white_noise_budget_test_data'
rw.write_hdf5_dict(savedir + datafile, data)
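For context on the horizontal band drawn with graph.axhband: in the inertial range Kolmogorov scaling predicts D_LL(r) ≈ C_2 (εr)^{2/3}, with the constant commonly quoted as C_2 ≈ 2, so the compensated curve D_xx/(εr)^{2/3} is expected to plateau near 2; the band from 2 × 0.85 to 2 × 1.15 marks a ±15% window around that value.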
Example #8
        #              'bins_ux_jhtd_c', 'hist_ux_jhtd_c', 'bins_uy_jhtd_c', 'hist_uy_jhtd_c',
        #              'bins_ux_diff_c', 'hist_ux_diff_c', 'bins_uy_diff_c', 'hist_uy_diff_c',
        #              'bins_ux_jhtd_avg', 'hist_ux_jhtd_avg', 'bins_uy_jhtd_avg', 'hist_uy_jhtd_avg',
        #              'bins_ux_diff_avg', 'hist_ux_diff_avg', 'bins_uy_diff_avg', 'hist_uy_diff_avg']
        # for dataname, content in zip(datanames, histdata_list):
        #     histdata[dataname] = content
        #
        # datafilepath = os.path.join(parentdir, 'pdf_data_local/' + resultsubdirname)
        # rw.write_hdf5_dict(datafilepath, histdata)
        #

        err_data = {}
        datanames = ['ux_mean', 'uy_mean', 'ux_center_temp', 'uy_center_temp',
                     'ux_err_mean_temp', 'uy_err_mean_temp', 'ux_err_center_temp', 'uy_err_center_temp']
        for dataname, content in zip(datanames, errdata_list):
            err_data[dataname] = content

        datafilepath = os.path.join(parentdir, 'err_data/' + resultsubdirname)
        rw.write_hdf5_dict(datafilepath, err_data)
    except Exception:
        # if anything goes wrong with this dataset, close the file and move on
        piv_data.close()
        continue


    piv_data.close()


jhtd_data.close()

print 'Done'