Example #1
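All five examples assume the same module-level imports (a minimal sketch in the examples' Python 2 style; gb is a project-local helper whose gb2ge converts a general-banded matrix to dense storage):

import sys
import numpy as np
import h5py
import matplotlib
matplotlib.use('Agg')              # assumed: render off-screen, the examples only call savefig
from matplotlib import pyplot
import gb                          # project-local module providing gb2ge (general-banded -> dense)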
def plot(hdf5file, fileext, ifile, plot_all):
    print "Plotting", hdf5file

    # Load a stats file
    f = h5py.File(hdf5file,'r')

    # Grab number of collocation points and B-spline order
    Ny=f['Ny'].value[0]
    k=f['k'].value[0]

    # Grab collocation points
    y = f['collocation_points_y'].value

    # Grab number of species
    Ns=f['antioch_constitutive_data'].attrs['Ns'][0]

    # Grab species names
    sname= np.chararray(Ns, itemsize=5)
    for s in xrange(0,Ns):
      sname[s]=f['antioch_constitutive_data'].attrs['Species_'+str(s)]

    # Get "mass" matrix and convert to dense format
    D0T_gb = f['Dy0T'].value
    D0T = gb.gb2ge(D0T_gb, Ny, k-2)

    # Get "mass" matrix and convert to dense format
    D1T_gb = f['Dy1T'].value
    D1T = gb.gb2ge(D1T_gb, Ny, k-2)

    # Get "mass" matrix and convert to dense format
    D2T_gb = f['Dy2T'].value
    D2T = gb.gb2ge(D2T_gb, Ny, k-2)

    # Grab rho coefficients
    rho_coeff = f['bar_rho'].value
    rho_coeff = np.array(rho_coeff).reshape(Ny,1)


    # Grab rho_u coefficients
    rho_u_coeff = f['bar_rho_u'].value
    rho_u_coeff = np.array(rho_u_coeff).transpose().reshape(Ny,3)

    # Grab rho_E coefficients
    rho_E_coeff = f['bar_rho_E'].value
    rho_E_coeff = np.array(rho_E_coeff).reshape(Ny,1)

    # Grab T coefficients
    T_coeff = f['bar_T'].value
    T_coeff = np.array(T_coeff).reshape(Ny,1)

    # Grab T_T coefficients
    T_T_coeff = f['bar_T_T'].value
    T_T_coeff = np.array(T_T_coeff).reshape(Ny,1)

    # Grab rho_u_u coefficients
    rho_u_u_coeff = f['bar_rho_u_u'].value
    rho_u_u_coeff = np.array(rho_u_u_coeff).transpose().reshape(Ny,6)

    # Grab rho_s coefficients
    rho_s_coeff = f['bar_rho_s'].value
    rho_s_coeff = np.array(rho_s_coeff).transpose().reshape(Ny,Ns)

    # Grab reaction rate coefficients
    om_s_coeff = f['bar_om_s'].value
    om_s_coeff = np.array(om_s_coeff).transpose().reshape(Ny,Ns)

    # Grab p coefficients
    p_coeff = f['bar_p'].value
    p_coeff = np.array(p_coeff).reshape(Ny,1)

    # Grab a coefficients
    a_coeff = f['bar_a'].value
    a_coeff = np.array(a_coeff).reshape(Ny,1)

    # Grid delta y, collocation points
    dy = np.diff(y)
    dy = np.append(dy,dy[Ny-2])
    dy = np.array(dy).reshape(Ny,1)

    # Grab mu coefficients
    mu_coeff = f['bar_mu'].value
    mu_coeff = np.array(mu_coeff).reshape(Ny,1)

    # Grab nu coefficients
    nu_coeff = f['bar_nu'].value
    nu_coeff = np.array(nu_coeff).reshape(Ny,1)

    # Grab breakpoints
    yb = f['breakpoints_y'].value

    # Grid delta y, breakpoints
    dyb = np.diff(yb)
    dyb = np.append(dyb,dyb[Ny-6])
    dyb = np.array(dyb).reshape(Ny-4,1)

    # load baseflow coefficients
    base_rho   = None
    base_rho_u = None
    base_rho_v = None
    base_rho_w = None
    base_rho_E = None
    base_p     = None
    if "largo_baseflow" in f:
      if f['largo_baseflow'].attrs['coefficient_base'] == 'polynomial':
        baseflow_coeff = f['largo_baseflow'].value
        # print 'baseflow coefficients loaded'
        npoly = baseflow_coeff.shape[1]
        base_rho   = np.zeros((Ny,1))
        base_rho_u = np.zeros((Ny,1))
        base_rho_v = np.zeros((Ny,1))
        base_rho_w = np.zeros((Ny,1))
        base_rho_E = np.zeros((Ny,1))
        base_p     = np.zeros((Ny,1))
        for i in xrange(0,npoly):
          for j in xrange(0, Ny):
            y_power_i = np.power(y[j,],i)
            base_rho  [j,0] += y_power_i  * baseflow_coeff[0,i]
            base_rho_u[j,0] += y_power_i  * baseflow_coeff[1,i]
            base_rho_v[j,0] += y_power_i  * baseflow_coeff[2,i]
            base_rho_w[j,0] += y_power_i  * baseflow_coeff[3,i]
            base_rho_E[j,0] += y_power_i  * baseflow_coeff[4,i]
            base_p    [j,0] += y_power_i  * baseflow_coeff[5,i]
      #else:
        # skip loading
        # print 'baseflow coefficients not polynomial'

    # Done getting data
    f.close()

    D0 = D0T.transpose()
    D1 = D1T.transpose()
    D2 = D2T.transpose()
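    # D0, D1, and D2 act on vectors of B-spline coefficients and return the value,
    # first y-derivative, and second y-derivative at the collocation points,
    # e.g. bar_rho(y_j) = sum_i D0[j,i] * rho_coeff[i].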

    # Coefficients -> Collocation points
    rho_col     = D0*rho_coeff
    rho_u_col   = D0*rho_u_coeff
    rho_E_col   = D0*rho_E_coeff
    T_col       = D0*T_coeff
    T_T_col     = D0*T_T_coeff
    rho_u_u_col = D0*rho_u_u_coeff
    rho_s_col   = D0*rho_s_coeff
    om_s_col    = D0*om_s_coeff
    mu_col      = D0*mu_coeff
    nu_col      = D0*nu_coeff
    p_col       = D0*p_coeff
    a_col       = D0*a_coeff

    rho_E_col_y  = D1*rho_E_coeff
    rho_E_col_yy = D2*rho_E_coeff
    p_col_y      = D1*p_coeff
    p_col_yy     = D2*p_coeff
    rho_col_y    = D1*rho_coeff
    rho_col_yy   = D2*rho_coeff


    # Computed quantities
    # - Favre averages
    fav_u       = np.array(rho_u_col[:,0]/rho_col[:,0]).reshape(Ny,1)
    fav_v       = np.array(rho_u_col[:,1]/rho_col[:,0]).reshape(Ny,1)
    fav_w       = np.array(rho_u_col[:,2]/rho_col[:,0]).reshape(Ny,1)

    fav_H       = np.array((rho_E_col[:,0] + p_col[:,0])/rho_col[:,0]).reshape(Ny,1)

    if (plot_all):
        # d(d(\fav{H})/dy)/dy
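        # Quotient rule applied twice to fav{H} = (bar{rho_E} + bar{p}) / bar{rho}:
        #   H'' = (rho_E+p)''/rho - 2*(rho_E+p)'*rho'/rho^2
        #         - (rho_E+p)*rho''/rho^2 + 2*(rho_E+p)*(rho')^2/rho^3
        # assembled term by term below.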
        rho_col_2   =    np.multiply(rho_col  [:,0], rho_col  [:,0]).reshape(Ny,1)
        rho_col_3   =    np.multiply(rho_col_2[:,0], rho_col  [:,0]).reshape(Ny,1)
        rho_col_y2  =    np.multiply(rho_col_y[:,0], rho_col_y[:,0]).reshape(Ny,1)
        fav_H_yy    =    np.array   ((rho_E_col_yy[:,0] + p_col_yy[:,0])/rho_col  [:,0]                 ).reshape(Ny,1)
        fav_H_yy   -= 2.*np.multiply((rho_E_col_y [:,0] + p_col_y [:,0])/rho_col_2[:,0], rho_col_y [:,0]).reshape(Ny,1)
        fav_H_yy   -=    np.multiply((rho_E_col   [:,0] + p_col   [:,0])/rho_col_2[:,0], rho_col_yy[:,0]).reshape(Ny,1)
        fav_H_yy   += 2.*np.multiply((rho_E_col   [:,0] + p_col   [:,0])/rho_col_3[:,0], rho_col_y2[:,0]).reshape(Ny,1)


    # - Bar rho_upp
    # rho_upp     = np.array(np.ravel(rho_col) * np.ravel(fav_u)).reshape(Ny,1)
    rho_upp     = rho_u_col[:,0] - np.array(np.ravel(rho_col) * np.ravel(fav_u)).reshape(Ny,1)
    rho_vpp     = rho_u_col[:,1] - np.array(np.ravel(rho_col) * np.ravel(fav_v)).reshape(Ny,1)
    rho_wpp     = rho_u_col[:,2] - np.array(np.ravel(rho_col) * np.ravel(fav_w)).reshape(Ny,1)
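    # Analytically bar{rho u''} = bar{rho u} - bar{rho} fav{u} = 0; the terms above retain
    # only the discretization residual, which feeds the correction terms added to the
    # Reynolds stresses below.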

    # - Reynolds stresses
    R_u_u_col   = rho_u_u_col[:,0] - np.multiply(np.multiply(rho_u_col[:,0], rho_u_col[:,0]).reshape(Ny,1), 1/rho_col[:,0])
    R_u_u_col  += 2.0 * np.array(np.ravel(rho_upp) * np.ravel(fav_u)).reshape(Ny,1)

    R_u_v_col   = rho_u_u_col[:,1] - np.multiply(np.multiply(rho_u_col[:,0], rho_u_col[:,1]).reshape(Ny,1), 1/rho_col[:,0])
    R_u_v_col  += np.array(np.ravel(rho_upp) * np.ravel(fav_v)).reshape(Ny,1) + np.array(np.ravel(rho_vpp) * np.ravel(fav_u)).reshape(Ny,1)

    R_v_v_col   = rho_u_u_col[:,3] - np.multiply(np.multiply(rho_u_col[:,1], rho_u_col[:,1]).reshape(Ny,1), 1/rho_col[:,0])
    R_v_v_col  += 2.0 * np.array(np.ravel(rho_vpp) * np.ravel(fav_v)).reshape(Ny,1)

    R_w_w_col   = rho_u_u_col[:,5] - np.multiply(np.multiply(rho_u_col[:,2], rho_u_col[:,2]).reshape(Ny,1), 1/rho_col[:,0])
    R_w_w_col  += 2.0 * np.array(np.ravel(rho_wpp) * np.ravel(fav_w)).reshape(Ny,1)
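    # Each R term above is the density-weighted stress bar{rho u_i'' u_j''}
    #   = bar{rho u_i u_j} - bar{rho u_i} * bar{rho u_j} / bar{rho},
    # plus the (analytically zero) bar{rho u''} corrections kept for consistency.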

    # - Viscous time scale at breakpoints: dssqr_over_2nu = dy_b^2 / (2*nu)
    nub = np.interp(yb,y,np.ravel(nu_col))
    nub = np.array(nub).reshape(Ny-4,1)
    inv_nub = 1/nub
    dssqr_over_2nu = np.multiply(dyb[:,0], dyb[:,0]).reshape(Ny-4,1)
    dssqr_over_2nu = np.multiply(dssqr_over_2nu[:,0], 1/nub[:,0]).reshape(Ny-4,1) * 0.5

    # - Temperature variance: Tp_Tp = bar{T T} - bar{T}^2 (rms plotted as sqrt(Tp_Tp)/bar{T})
    Tp_Tp = T_T_col - np.multiply(T_col,T_col).reshape(Ny,1)

    # Plots
    figid  = 0
    figid += 1
    pyplot.figure(figid)
    key = "bar_rho_" + str(ifile)
    if (ifile == 0 and base_rho is not None):
      pyplot.plot(y, base_rho[:,0], linewidth=1)
    pyplot.plot(y, rho_col, linewidth=3, label=key)
    pyplot.legend(loc=0)
    pyplot.savefig('bar_rho.' + fileext, bbox_inches='tight')

    figid += 1
    pyplot.figure(figid)
    key = "bar_rho_u" + str(ifile)
    if (ifile == 0 and base_rho_u is not None):
      pyplot.plot(y, base_rho_u[:,0], linewidth=1)
    pyplot.plot(y, rho_u_col[:,0], linewidth=3, label=key)
    pyplot.legend(loc=0)
    pyplot.savefig('bar_rho_u.' + fileext, bbox_inches='tight')

    figid += 1
    pyplot.figure(figid)
    key = "bar_rho_v" + str(ifile)
    if (ifile == 0 and base_rho_v is not None):
      pyplot.plot(y, base_rho_v[:,0], linewidth=1)
    pyplot.semilogx(y, rho_u_col[:,1], linewidth=3, label=key)
    pyplot.legend(loc=0)
    pyplot.savefig('bar_rho_v.' + fileext, bbox_inches='tight')

    figid += 1
    pyplot.figure(figid)
    key = "bar_rho_w" + str(ifile)
    if (ifile == 0 and base_rho_w is not None):
      pyplot.plot(y, base_rho_w[:,0], linewidth=1)
    pyplot.plot(y, rho_u_col[:,2], linewidth=3, label=key)
    pyplot.legend(loc=0)
    pyplot.savefig('bar_rho_w.' + fileext, bbox_inches='tight')

    figid += 1
    pyplot.figure(figid)
    key = "bar_rho_E" + str(ifile)
    if (ifile == 0 and base_rho_E is not None):
      pyplot.plot(y, base_rho_E[:,0], linewidth=1)
    pyplot.plot(y, rho_E_col, linewidth=3, label=key)
    pyplot.legend(loc=0)
    pyplot.savefig('bar_rho_E.' + fileext, bbox_inches='tight')

    figid += 1
    pyplot.figure(figid)
    key = "bar_T" + str(ifile)
    pyplot.semilogx(y, T_col, linewidth=3, label=key)
    pyplot.legend(loc=0)
    pyplot.savefig('bar_T.' + fileext, bbox_inches='tight')

    figid += 1
    pyplot.figure(figid)
    key = "sqrt_Tp_Tp" + str(ifile)
    pyplot.semilogx(y, np.sqrt(Tp_Tp)/T_col, linewidth=3, label=key)
    pyplot.legend(loc=0)
    pyplot.savefig('sqrt_Tp_Tp.' + fileext, bbox_inches='tight')

    figid += 1
    pyplot.figure(figid)
    key = "bar_p" + str(ifile)
    if (ifile == 0 and base_p is not None):
      pyplot.plot(y, base_p[:,0], linewidth=1)
    pyplot.semilogx(y, p_col, linewidth=3, label=key)
    pyplot.legend(loc=0)
    pyplot.savefig('bar_p.' + fileext, bbox_inches='tight')

    figid += 1
    pyplot.figure(figid)
    key = "bar_a" + str(ifile)
    pyplot.semilogx(y, a_col, linewidth=3, label=key)
    pyplot.legend(loc=0)
    pyplot.savefig('bar_a.' + fileext, bbox_inches='tight')

    figid += 1
    pyplot.figure(figid)
    key = "R_u_u" + str(ifile)
    pyplot.semilogx(y, R_u_u_col, linewidth=3, label=key)
    pyplot.legend(loc=0)
    pyplot.savefig('R_u_u.' + fileext, bbox_inches='tight')

    figid += 1
    pyplot.figure(figid)
    key = "R_u_v" + str(ifile)
    pyplot.semilogx(y, R_u_v_col, linewidth=3, label=key)
    pyplot.legend(loc=0)
    pyplot.savefig('R_u_v.' + fileext, bbox_inches='tight')

    figid += 1
    pyplot.figure(figid)
    key = "R_v_v" + str(ifile)
    pyplot.semilogx(y, R_v_v_col, linewidth=3, label=key)
    pyplot.legend(loc=0)
    pyplot.savefig('R_v_v.' + fileext, bbox_inches='tight')

    figid += 1
    pyplot.figure(figid)
    key = "R_w_w" + str(ifile)
    pyplot.semilogx(y, R_w_w_col, linewidth=3, label=key)
    pyplot.legend(loc=0)
    pyplot.savefig('R_w_w.' + fileext, bbox_inches='tight')

    for s in xrange(0,Ns):
      figid += 1
      pyplot.figure(figid)
      key = "rho_" + sname[s] + "_" + str(ifile)
      pyplot.semilogx(y, rho_s_col[:,s], linewidth=3, label=key)
      pyplot.legend(loc=0)
      rho_file = "rho_" + sname[s] + "." + fileext
      pyplot.savefig(rho_file, bbox_inches='tight')

    for s in xrange(0,Ns):
      figid += 1
      pyplot.figure(figid)
      key = "om_" + sname[s] + "_" + str(ifile)
      pyplot.semilogx(y, om_s_col[:,s], linewidth=3, label=key)
      pyplot.legend(loc=0)
      rho_file = "om_" + sname[s] + "." + fileext
      pyplot.savefig(rho_file, bbox_inches='tight')

    figid += 1
    pyplot.figure(figid)
    key = "dy_collocation" + str(ifile)
    pyplot.loglog(y, dy, 'o-', linewidth=3, label=key)
    pyplot.legend(loc=0)
    pyplot.savefig('dy.' + fileext, bbox_inches='tight')

    figid += 1
    pyplot.figure(figid)
    key = "dy_break" + str(ifile)
    pyplot.loglog(yb, dyb, 'o-', linewidth=3, label=key)
    pyplot.legend(loc=0)
    pyplot.savefig('dyb.' + fileext, bbox_inches='tight')

    figid += 1
    pyplot.figure(figid)
    key = "bar_mu" + str(ifile)
    pyplot.semilogx(y, mu_col, linewidth=3, label=key)
    pyplot.legend(loc=0)
    pyplot.savefig('bar_mu.' + fileext, bbox_inches='tight')

    figid += 1
    pyplot.figure(figid)
    key = "bar_nu" + str(ifile)
    pyplot.semilogx(y, nu_col, linewidth=3, label=key)
    pyplot.legend(loc=0)
    pyplot.savefig('bar_nu.' + fileext, bbox_inches='tight')

    figid += 1
    pyplot.figure(figid)
    key = "dssqr_over_2nu" + str(ifile)
    pyplot.loglog(yb, dssqr_over_2nu[:,0], linewidth=3, label=key)
    pyplot.legend(loc=0)
    pyplot.savefig('dssqr_over_2nu.' + fileext, bbox_inches='tight')

    figid += 1
    pyplot.figure(figid)
    key = "fav_u" + str(ifile)
    pyplot.plot(y, fav_u[:,0], linewidth=3, label=key)
    pyplot.legend(loc=0)
    pyplot.savefig('fav_u.' + fileext, bbox_inches='tight')

    figid += 1
    pyplot.figure(figid)
    key = "fav_H" + str(ifile)
    pyplot.plot(y, fav_H[:,0], linewidth=3, label=key)
    pyplot.legend(loc=0)
    pyplot.savefig('fav_H.' + fileext, bbox_inches='tight')

    if (plot_all):
        figid += 1
        pyplot.figure(figid)
        key = "fav_H_yy" + str(ifile)
        pyplot.plot(y, fav_H_yy[:,0], linewidth=0.1, label=key)
        pyplot.axhline(linewidth=0.1, color='r')
        pyplot.legend(loc=0)
        pyplot.savefig('fav_H_yy.' + fileext, bbox_inches='tight')
Example #2
def getblparam(hdf5file):
    print "Processing", hdf5file

    # Load a stats file
    f = h5py.File(hdf5file,'r')

    # Grab time
    t=f['t'].value[0]

    # Grab number of points in x and z
    Nx=f['Nx'].value[0]
    Nz=f['Nz'].value[0]

    # Grab domain lengths
    Lx=f['Lx'].value[0]
    Ly=f['Ly'].value[0]
    Lz=f['Lz'].value[0]

    # Grab number of collocation points and B-spline order
    Ny=f['Ny'].value[0]
    k=f['k'].value[0]

    # Grab collocation points
    y = f['collocation_points_y'].value

    # Grab number of species
    Ns=f['antioch_constitutive_data'].attrs['Ns'][0]

    # Grab species names
    sname= np.chararray(Ns, itemsize=5)
    for s in xrange(0,Ns):
      sname[s]=f['antioch_constitutive_data'].attrs['Species_'+str(s)]

    # Get "mass" matrix and convert to dense format
    D0T_gb = f['Dy0T'].value
    D0T = gb.gb2ge(D0T_gb, Ny, k-2)

    # Get "mass" matrix and convert to dense format
    D1T_gb = f['Dy1T'].value
    D1T = gb.gb2ge(D1T_gb, Ny, k-2)

    # Grab rho coefficients
    rho_coeff = f['bar_rho'].value
    rho_coeff = np.array(rho_coeff).reshape(Ny,1)

    # Grab rho_u coefficients
    rho_u_coeff = f['bar_rho_u'].value
    rho_u_coeff = np.array(rho_u_coeff).transpose().reshape(Ny,3)

    # Grab rho_E coefficients
    rho_E_coeff = f['bar_rho_E'].value
    rho_E_coeff = np.array(rho_E_coeff).reshape(Ny,1)

    # Grab T coefficients
    T_coeff = f['bar_T'].value
    T_coeff = np.array(T_coeff).reshape(Ny,1)

    # Grab rho_s coefficients
    rho_s_coeff = f['bar_rho_s'].value
    rho_s_coeff = np.array(rho_s_coeff).transpose().reshape(Ny,Ns)

    # Grab bar_u coefficients
    bar_u_coeff = f['bar_u'].value
    bar_u_coeff = np.array(bar_u_coeff).transpose().reshape(Ny,3)

    # Grab bar_mu coefficients
    bar_mu_coeff = f['bar_mu'].value
    bar_mu_coeff = np.array(bar_mu_coeff).transpose().reshape(Ny,1)

    # Grab p coefficients
    p_coeff = f['bar_p'].value
    p_coeff = np.array(p_coeff).reshape(Ny,1)

    # Grab a coefficients
    a_coeff = f['bar_a'].value
    a_coeff = np.array(a_coeff).reshape(Ny,1)

    # Grid delta y, collocation
    dy = np.diff(y)
    dy = np.append(dy,dy[Ny-2])
    dy = np.array(dy).reshape(Ny,1)

    # Grab breakpoints
    yb = f['breakpoints_y'].value

    # Grid delta y, breakpoints
    dyb = np.diff(yb)
    dyb = np.append(dyb,dyb[Ny-6])
    dyb = np.array(dyb).reshape(Ny-4,1)

    # Integration weights
    i_weights = f['integration_weights'].value
    i_weights = np.array(i_weights).reshape(Ny,1)

    # Done getting data
    f.close()

    D0 = D0T.transpose()
    D1 = D1T.transpose()

    # Coefficients -> Collocation points
    rho_col     = D0*rho_coeff
    rho_u_col   = D0*rho_u_coeff
    rho_E_col   = D0*rho_E_coeff
    T_col       = D0*T_coeff
    p_col       = D0*p_coeff
    a_col       = D0*a_coeff
    rho_s_col   = D0*rho_s_coeff
    bar_u_col   = D0*bar_u_coeff
    bar_du_col  = D1*bar_u_coeff

    # Input parameters
    T_wall     =  T_coeff[0,0]
    T_inf      =  T_coeff[Ny-1,0]
    V_wall     =  bar_u_coeff[0,1]
    U_inf      =  bar_u_coeff[Ny-1,0]
    V_inf      =  bar_u_coeff[Ny-1,1]
    W_inf      =  bar_u_coeff[Ny-1,2]

    # Grid parameters
    y1         =  y[1]
    y1b        =  yb[1]

    # Raw parameters
    p_wall     =  p_coeff[0,0]
    p_inf      =  p_coeff[Ny-1,0]
    a_wall     =  a_coeff[0,0]
    a_inf      =  a_coeff[Ny-1,0]
    rho_wall   =  rho_coeff[0,0]
    mu_wall    =  bar_mu_coeff[0,0]
    rho_inf    =  rho_coeff[Ny-1,0]
    mu_inf     =  bar_mu_coeff[Ny-1,0]
    dudy_wall  =  bar_du_col[0,0]

    # Compute delta (BL thickness)
    jdelta = 0
    for j in xrange(0,Ny):
        if (bar_u_col[j,0] < 0.99*U_inf):
           jdelta += 1
        else:
           break

    # Refine delta by linear interpolation around y[jdelta]
    # (after the loop above, j == jdelta at the break; use jdelta explicitly)
    frac  = (0.99*U_inf - bar_u_col[jdelta,0]) / (bar_u_col[jdelta+1,0] - bar_u_col[jdelta,0])
    delta = (y[jdelta+1] - y[jdelta]) * frac + y[jdelta]

    # TODO: Here the value at the edge is defined as the value at "infinity"
    #       This needs to be generalized for baseflow computations
    rho_edge  = rho_inf
    rhoU_edge = rho_inf * U_inf
    U_edge    = U_inf
    mu_edge   = mu_inf

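    # Both thicknesses below are discrete quadratures with the B-spline integration
    # weights of the standard compressible definitions:
    #   delta_star = integral( 1 - rho*u/(rho_e*U_e) ) dy
    #   theta      = integral( rho*u/(rho_e*U_e) * (1 - u/U_e) ) dy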
    # Compute delta_star (displacement thickness)
    delta_star = 0
    for j in xrange(0,Ny):
        delta_star += (1 - rho_u_col[j,0] / rhoU_edge) * i_weights[j,0]

    # Compute theta (momentum thickness)
    theta = 0
    for j in xrange(0,Ny):
        theta += rho_u_col[j,0] / rhoU_edge * (1 - bar_u_col[j,0] / U_edge) * i_weights[j,0]

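    # Wall units: u_tau = sqrt(tau_wall/rho_wall), delta_nu = mu_wall/(rho_wall*u_tau),
    # Re_tau = delta/delta_nu; the eddy turnover time is delta/u_tau and the flowthrough
    # time is Lx/U_edge.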
    # Computed parameters
    Re_delta_star    = rho_edge * U_edge * delta_star / mu_edge
    Re_theta         = rho_edge * U_edge * theta      / mu_edge
    Re_delta         = rho_edge * U_edge * delta      / mu_edge
    H1               = delta_star / theta
    H2               = delta      / theta
    tau_wall         = mu_wall * dudy_wall
    u_tau            = np.sqrt(tau_wall / rho_wall)
    delta_nu         = mu_wall / rho_wall / u_tau
    Re_tau           = rho_wall * u_tau * delta / mu_wall
    turnover_time    = delta / u_tau
    flowthrough_time = Lx / U_edge

    Lx_over_delta    = Lx / delta
    Ly_over_delta    = Ly / delta
    Lz_over_delta    = Lz / delta
    y1_plus          = y1 / delta_nu
    y1b_plus         = y1b / delta_nu
    Dx               = Lx / Nx
    Dz               = Lz / Nz
    Dx_plus          = Dx / delta_nu
    Dz_plus          = Dz / delta_nu

    # Resolution in y, collocation
    Ny_below_5plus  = 0
    for j in xrange(0,Ny):
        if (y[j] < 5*delta_nu):
           Ny_below_5plus += 1
        else:
           break

    Ny_below_10plus = 0
    for j in xrange(0,Ny):
        if (y[j] < 10*delta_nu):
           Ny_below_10plus += 1
        else:
           break

    Ny_below_delta = 0
    for j in xrange(0,Ny):
        if (y[j] < delta):
           Ny_below_delta += 1
        else:
           break

    # Resolution in y, breakpoints
    Nyb_below_5plus  = 0
    for j in xrange(0,Ny):
        if (yb[j] < 5*delta_nu):
           Nyb_below_5plus += 1
        else:
           break

    Nyb_below_10plus = 0
    for j in xrange(0,Ny):
        if (yb[j] < 10*delta_nu):
           Nyb_below_10plus += 1
        else:
           break

    Nyb_below_delta = 0
    for j in xrange(0,Ny):
        if (yb[j] < delta):
           Nyb_below_delta += 1
        else:
           break

    # Put parameters in an array
    prms = np.empty([0,1])
    prms = np.append(prms, [t                 ])
    prms = np.append(prms, [rho_wall          ])
    prms = np.append(prms, [p_wall            ])
    prms = np.append(prms, [a_wall            ])
    prms = np.append(prms, [mu_wall           ])
    prms = np.append(prms, [mu_inf            ])
    prms = np.append(prms, [delta_star        ])
    prms = np.append(prms, [theta             ])
    prms = np.append(prms, [delta             ])
    prms = np.append(prms, [H1                ])
    prms = np.append(prms, [H2                ])
    prms = np.append(prms, [dudy_wall         ])
    prms = np.append(prms, [tau_wall          ])
    prms = np.append(prms, [u_tau             ])
    prms = np.append(prms, [delta_nu          ])
    prms = np.append(prms, [y1b_plus          ])
    prms = np.append(prms, [Re_tau            ])
    prms = np.append(prms, [Re_delta_star     ])
    prms = np.append(prms, [Re_theta          ])
    prms = np.append(prms, [Re_delta          ])
    prms = np.append(prms, [U_edge            ])
    prms = np.append(prms, [V_inf             ])
    prms = np.append(prms, [W_inf             ])
    prms = np.append(prms, [T_inf             ])
    prms = np.append(prms, [rho_inf           ])
    prms = np.append(prms, [p_inf             ])
    prms = np.append(prms, [a_inf             ])
    prms = np.append(prms, [Dx_plus           ])
    prms = np.append(prms, [Dz_plus           ])
    prms = np.append(prms, [Lx_over_delta     ])
    prms = np.append(prms, [Ly_over_delta     ])
    prms = np.append(prms, [Lz_over_delta     ])
    prms = np.append(prms, [turnover_time     ])
    prms = np.append(prms, [flowthrough_time  ])

    return prms
Example #3
    def load_metadata(self, file):

        print "Loading metadata from", file
        f = h5py.File(file, "r")

        # Input metadata
        self.metascalars = ['Nx', 'Ny', 'Nz', 'k', 'Lx', 'Ly', 'Lz']
        self.metalines   = ['breakpoints_y',
                            'collocation_points_x',
                            'collocation_points_y',
                            'collocation_points_z',
                            'Dy0T', 'Dy1T', 'Dy2T',
                            'integration_weights']

        # Open first file to load metadata
        f = h5py.File(file, "r")

        # Load reference metadata
        for scalar in self.metascalars:
            self.__dict__[scalar] = np.squeeze(f[scalar][()])

        for line in self.metalines:
            self.__dict__[line] = np.squeeze(f[line][()])

        # Grab number of species
        self.Ns=f['antioch_constitutive_data'].attrs['Ns'][0]

        # Grab species names
        self.sname= []
        for s in xrange(self.Ns):
            self.sname.append(f['antioch_constitutive_data'].attrs['Species_'+str(s)])

        # Grab delta growth rate
        self.grD = f['largo'].attrs['grdelta'][0]

        # Grab largo formulation
        self.formulation = f['largo'].attrs['formulation']

        # Grab mean amplitude growth rates
        if 'largo_gramp_mean' in f:
            self.largo_gramp = f['largo_gramp_mean'].value
        else:
            self.largo_gramp = None #np.zeros((6,1))

        # Grab baseflow coefficients
        if 'largo_baseflow' in f:
            self.coeff_baseflow = np.squeeze(f['largo_baseflow'][()])
        else:
            self.coeff_baseflow = None #np.zeros((6,2))

        if 'largo_baseflow_dx' in f:
            self.coeff_baseflow_dx = np.squeeze(f['largo_baseflow_dx'][()])
        else:
            self.coeff_baseflow_dx = None #np.zeros((6,2))

        # Process reference metadata
        # Check if the first file is a sample file
        # or a summary file (by looking for y)
        if 'y' not in f.keys():
            # 'y' is absent in a sample file; fall back to the collocation points
            self.y   = self.collocation_points_y
            self.processfiles = True
        else:
            if self.nfiles > 1:
                #f.close()
                print >>sys.stderr, "The first file is a summary file, only one can be loaded"
            else:
                self.y   = self.collocation_points_y
                self.processfiles = False

        f.close()

        self.yb  = self.breakpoints_y
        self.iw  = self.integration_weights
        self.D0T = gb.gb2ge(self.Dy0T, self.Ny, self.k-2)
        self.D1T = gb.gb2ge(self.Dy1T, self.Ny, self.k-2)
        self.D2T = gb.gb2ge(self.Dy2T, self.Ny, self.k-2)

        # Matrices to compute first and second derivatives
        # from data on collocation points.
        # Eg, first derivative computed as df/dy = D1 * D0^-1 f(y)
        self.invD0T = np.linalg.solve(self.D0T, np.eye(self.Ny))
        self.invD0T_D1T = np.dot(self.invD0T, self.D1T)
        self.invD0T_D2T = np.dot(self.invD0T, self.D2T)
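        # For row-ordered collocation data these are presumably applied from the right,
        #   dfdy_row = np.dot(f_row, self.invD0T_D1T),
        # the transposed form of df/dy = D1 * D0^-1 * f above.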
        return
Example #4
def load(h5filenames, verbose=False):
    """Load the data required by process() into a dict from named files.
    Averages of /twopoint_kx and /twopoint_kz are taken across all inputs.
    Other results reflect only the metadata from the last file loaded.
    """
    Rkx     = None
    Rkz     = None
    bar     = None
    bar_T   = None
    bar_u   = None
    bar_rho = None
    bar_cs  = None
    d       = {}
    D0      = None
    for ndx, h5filename in enumerate(h5filenames):
        if verbose:
            log.info("Loading %d/%d: %s"
                     % (ndx+1, len(h5filenames), h5filename))

        h5file = h5py.File(h5filename, 'r')
        Ns = 0
        sname = []

        # Grab number of collocation points and B-spline order
        Ny = h5file['Ny'][0]
        k  = h5file['k'][0]

        # Get "mass" matrix and convert to dense format
        D0T_gb = h5file['Dy0T'].value
        D0T    = gb.gb2ge(D0T_gb, Ny, k-2)
        D0     = D0T.transpose()

        if "antioch_constitutive_data" in h5file:
            Ns=h5file['antioch_constitutive_data'].attrs['Ns'][0]
            sname= np.chararray(Ns, itemsize=5)
            for s in xrange(0,Ns):
                sname[s]=h5file['antioch_constitutive_data'].attrs['Species_'+str(s)]
        d.update(dict(
            kx = h5file['kx'][()],
            kz = h5file['kz'][()],
            Lx = h5file['Lx'][0],
            Ly = h5file['Ly'][0],
            Lz = h5file['Lz'][0],
            Nx = h5file['Nx'][0],
            Ny = h5file['Ny'][0],
            Nz = h5file['Nz'][0],
            y  = h5file['collocation_points_y'][()],
            Ns = Ns,
            sn = sname
        ))
        if Rkx is None:
            Rkx  = np.squeeze(h5file['twopoint_kx'][()].view(np.complex128))
        else:
            Rkx += np.squeeze(h5file['twopoint_kx'][()].view(np.complex128))
        if Rkz is None:
            Rkz  = np.squeeze(h5file['twopoint_kz'][()].view(np.complex128))
        else:
            Rkz += np.squeeze(h5file['twopoint_kz'][()].view(np.complex128))
        if bar_T is None:
            bar_T  = np.squeeze(h5file['bar_T'][()].view(np.float64))
        else:
            bar_T += np.squeeze(h5file['bar_T'][()].view(np.float64))
        if bar_u is None:
            bar_u  = np.squeeze(h5file['bar_u'][()].view(np.float64))
        else:
            bar_u += np.squeeze(h5file['bar_u'][()].view(np.float64))
        if bar_rho is None:
            bar_rho  = np.squeeze(h5file['bar_rho'][()].view(np.float64))
        else:
            bar_rho += np.squeeze(h5file['bar_rho'][()].view(np.float64))
        if "antioch_constitutive_data" in h5file:
            if Ns > 1:
                if bar_cs is None:
                    bar_cs  = np.squeeze(h5file['bar_cs_s'][()].view(np.float64))
                else:
                    bar_cs += np.squeeze(h5file['bar_cs_s'][()].view(np.float64))
            else:
                if bar_cs is None:
                    bar_cs  = 0 * np.array(bar_rho).reshape(1,Ny) + 1
                else:
                    bar_cs += 0 * np.array(bar_rho).reshape(1,Ny) + 1
        h5file.close()
    if Rkx is not None:
        Rkx /= len(h5filenames)
    if Rkz is not None:
        Rkz /= len(h5filenames)
    if bar_T is not None:
        bar_T /= len(h5filenames)
        bar_T  = np.dot(D0,bar_T)
    if bar_u is not None:
        bar_u /= len(h5filenames)
        bar_u  = np.transpose(np.dot(D0,np.transpose(bar_u)))
    if bar_rho is not None:
        bar_rho /= len(h5filenames)
        bar_rho = np.dot(D0,bar_rho)
    if bar_cs is not None:
        bar_cs /= len(h5filenames)
        bar_cs  = np.transpose(np.dot(D0,np.transpose(bar_cs)))

    # Pack mean fields, assume no variable remained as None,
    # except for possibly bar_cs
    if bar_cs is None:
        bar = np.concatenate((bar_T, bar_u, bar_rho        )).reshape(5+Ns, Ny)
    else:
        bar = np.concatenate((bar_T, bar_u, bar_rho, bar_cs)).reshape(5+Ns, Ny)

    d.update(dict(
        Rkx = Rkx,
        Rkz = Rkz,
        bar = bar
    ))
    return d
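A usage sketch (the file names are placeholders): load() returns a dict whose mean fields have been averaged over the inputs and converted from B-spline coefficients to collocation-point values.

d = load(['sample0.h5', 'sample1.h5'])     # hypothetical sample files
Rkx, Rkz = d['Rkx'], d['Rkz']              # averaged two-point spectra
bar, y   = d['bar'], d['y']                # packed mean fields and wall-normal grid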
Example #5
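This example relies on target parameters and a grid-stretching helper gety that the original script defines at module scope. The block below is a hedged stand-in (illustrative values only, and a hypothetical gety) so the names used inside getplan are visible in one place:

import numpy as np

# Illustrative stand-ins; the original script sets these at module scope.
use_theta_reference        = False   # reference the edge spacing to theta or to delta_star
delta_factor               = 1.5     # growth factor applied to the current delta
Lx_over_delta_tgt          = 10.0    # target domain extents in units of delta
Ly_over_delta_tgt          = 3.0
Lz_over_delta_tgt          = 5.0
Dx_over_delta_nu_tgt       = 10.0    # target x/z spacings in wall units
Dz_over_delta_nu_tgt       = 5.0
y1_plus_tgt                = 1.0     # target first off-wall point in wall units
Dy_edge_over_delta_ref_tgt = 0.1     # target wall-normal spacing at the BL edge
CPUh_performance           = 1.0     # CPU-hours per 10^6 points per 10^3 time steps
samples_per_turnover       = 10.0    # statistics samples per eddy turnover time

def gety(r, Ly, x):
    # Hypothetical one-sided tanh stretching clustered at the wall (y = 0);
    # the original definition is not part of this snippet.
    return Ly * (1.0 + np.tanh(r * (x - 1.0)) / np.tanh(r))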
def getplan(hdf5file, dt):
    print "Processing", hdf5file

    # Load a stats file
    f = h5py.File(hdf5file,'r')
    # print "File loaded"

    # Grab time
    t=f['t'].value[0]

    # Grab number of points in x and z
    Nx=f['Nx'].value[0]
    Nz=f['Nz'].value[0]

    # Grab domain lengths
    Lx=f['Lx'].value[0]
    Ly=f['Ly'].value[0]
    Lz=f['Lz'].value[0]

    # Grab number of collocation points and B-spline order
    Ny=f['Ny'].value[0]
    k=f['k'].value[0]

    # Grab collocation points
    y = f['collocation_points_y'].value

    # Grab number of species
    Ns=f['antioch_constitutive_data'].attrs['Ns'][0]

    # Grab species names
    sname= np.chararray(Ns, itemsize=5)
    for s in xrange(0,Ns):
      sname[s]=f['antioch_constitutive_data'].attrs['Species_'+str(s)]

    # Get "mass" matrix and convert to dense format
    D0T_gb = f['Dy0T'].value
    D0T = gb.gb2ge(D0T_gb, Ny, k-2)

    # Get "mass" matrix and convert to dense format
    D1T_gb = f['Dy1T'].value
    D1T = gb.gb2ge(D1T_gb, Ny, k-2)

    # Grab rho coefficients
    rho_coeff = f['bar_rho'].value
    rho_coeff = np.array(rho_coeff).reshape(Ny,1)

    # Grab rho_u coefficients
    rho_u_coeff = f['bar_rho_u'].value
    rho_u_coeff = np.array(rho_u_coeff).transpose().reshape(Ny,3)

    # Grab rho_E coefficients
    rho_E_coeff = f['bar_rho_E'].value
    rho_E_coeff = np.array(rho_E_coeff).reshape(Ny,1)

    # Grab T coefficients
    T_coeff = f['bar_T'].value
    T_coeff = np.array(T_coeff).reshape(Ny,1)

    # Grab rho_u_u coefficients
    rho_u_u_coeff = f['bar_rho_u_u'].value
    rho_u_u_coeff = np.array(rho_u_u_coeff).transpose().reshape(Ny,6)

    # Grab rho_s coefficients
    rho_s_coeff = f['bar_rho_s'].value
    rho_s_coeff = np.array(rho_s_coeff).transpose().reshape(Ny,Ns)

    # Grab bar_u coefficients
    bar_u_coeff = f['bar_u'].value
    bar_u_coeff = np.array(bar_u_coeff).transpose().reshape(Ny,3)

    # Grab bar_mu coefficients
    bar_mu_coeff = f['bar_mu'].value
    bar_mu_coeff = np.array(bar_mu_coeff).transpose().reshape(Ny,1)

    # Grab bar_a coefficients
    bar_a_coeff = f['bar_a'].value
    bar_a_coeff = np.array(bar_a_coeff).transpose().reshape(Ny,1)

    # Grid delta y, collocation
    dy = np.diff(y)
    dy = np.append(dy,dy[Ny-2])
    dy = np.array(dy).reshape(Ny,1)

    # Grab breakpoints
    yb = f['breakpoints_y'].value

    # Grid delta y, breakpoints
    dyb = np.diff(yb)
    dyb = np.append(dyb,dyb[Ny-6])
    dyb = np.array(dyb).reshape(Ny-4,1)

    # Integration weights
    i_weights = f['integration_weights'].value
    i_weights = np.array(i_weights).reshape(Ny,1)

    # Done getting data
    f.close()

    D0 = D0T.transpose()
    D1 = D1T.transpose()

    # Coefficients -> Collocation points
    rho_col     = D0*rho_coeff
    rho_u_col   = D0*rho_u_coeff
    rho_E_col   = D0*rho_E_coeff
    T_col       = D0*T_coeff
    rho_u_u_col = D0*rho_u_u_coeff
    rho_s_col   = D0*rho_s_coeff
    bar_u_col   = D0*bar_u_coeff
    bar_du_col  = D1*bar_u_coeff

    # Input parameters
    T_wall     =  T_coeff[0,0]
    T_inf      =  T_coeff[Ny-1,0]
    V_wall     =  bar_u_coeff[0,1]
    U_inf      =  bar_u_coeff[Ny-1,0]
    mu_wall    =  bar_mu_coeff[0,0]
    mu_inf     =  bar_mu_coeff[Ny-1,0]
    a_inf      =  bar_a_coeff[Ny-1,0]
    rho_u_inf  =  rho_u_coeff[Ny-1,0]

    # Raw parameters
    rho_wall   =  rho_coeff[0,0]
    rho_inf    =  rho_coeff[Ny-1,0]
    dudy_wall  =  bar_du_col[0,0]


    # Compute BL thickness (delta)
    # NOTE: keeping the value from the computation for now,
    # since the simulation is not converged
    jdelta = 0
    for j in xrange(0,Ny):
        if (bar_u_col[j,0] < 0.99*U_inf):
           jdelta += 1
        else:
           break
    delta = y[jdelta]

    # FIXME: Generalize definition of edge values
    rho_edge  = rho_inf
    rhoU_edge = rho_u_inf
    U_edge    = U_inf
    mu_edge   = mu_inf

    # Compute delta_star (displacement thickness)
    delta_star = 0
    for j in xrange(0,Ny):
        delta_star += (1 - rho_u_col[j,0] / rhoU_edge) * i_weights[j,0]

    # Compute theta (momentum thickness)
    theta = 0
    for j in xrange(0,Ny):
        theta += rho_u_col[j,0] / rhoU_edge * (1 - bar_u_col[j,0] / U_edge) * i_weights[j,0]

    # Pick thickness as reference for edge resolution
    if use_theta_reference :
        delta_ref_resolution = theta
    else:
        delta_ref_resolution = delta_star

    # Computed parameters
    Re_delta_star = rho_inf * U_inf * delta_star / mu_inf
    Re_theta      = rho_inf * U_inf * theta      / mu_inf
    H1            = delta_star / theta
    H2            = delta      / theta
    tau_wall      = mu_wall * dudy_wall
    u_tau         = np.sqrt(tau_wall / rho_wall)
    delta_nu      = mu_wall / rho_wall / u_tau
    Re_tau        = rho_wall * u_tau * delta / mu_wall
    turnover_time = delta / u_tau

    # Output parameters
    print 'Boundary layer input parameters'
    print 'T_wall                      = ', T_wall
    print 'T_inf                       = ', T_inf
    print 'U_inf                       = ', U_inf
    print 'V_wall                      = ', V_wall
    print
    print 'Raw parameters'
    print 'Ma_inf                      = ', U_inf/a_inf
    print 'rho_wall                    = ', rho_wall
    print 'rho_inf                     = ', rho_inf
    print 'delta_star                  = ', delta_star
    print 'theta                       = ', theta
    print 'delta                       = ', delta
    if dt != 0:
        print 'dt                          = ', dt

    print
    print 'Computed parameters'
    print 'Re_delta_star               = ', Re_delta_star
    print 'Re_theta                    = ', Re_theta
    print 'H1                          = ', H1
    print 'H2                          = ', H2
    print 'dU/dy|_wall                 = ', dudy_wall
    print 'tau|_wall                   = ', tau_wall
    print 'u_tau                       = ', u_tau
    print 'delta_nu                    = ', delta_nu
    print 'Re_tau                      = ', Re_tau
    print 'turnover_time               = ', turnover_time

    # Compute run parameters
    delta_tgt = delta * delta_factor
    Lx_tgt = Lx_over_delta_tgt * delta_tgt
    Ly_tgt = Ly_over_delta_tgt * delta_tgt
    Lz_tgt = Lz_over_delta_tgt * delta_tgt
    Dx_tgt = Dx_over_delta_nu_tgt * delta_nu
    Dz_tgt = Dz_over_delta_nu_tgt * delta_nu

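    # Nested bisection: the outer loop varies the number of wall-normal points until the
    # grid spacing at the boundary-layer edge meets Dy_edge_over_delta_ref_tgt; for each
    # candidate point count, the inner loop varies the stretching factor r until the first
    # off-wall point meets y1_plus_tgt.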
    # Compute y-mesh parameters
    nymin = 0
    nymax = 4096
    ny_mid = (nymin+nymax)/2
    tol_ny = 0.01
    err_ny = 2*tol_ny
    while (abs(err_ny) > tol_ny):
      rmin = 0.1
      rmax = 100
      r_mid = (rmin+rmax)/2
      tol_r = 0.01
      err_r = 2*tol_r
      # r is the stretching factor;
      # solve for the r that satisfies the y1_plus condition
      # for this
      while (abs(err_r) > tol_r and (rmax - rmin) > 1.0E-10):
        dy1 = 1/float(ny_mid)
        y_grid = np.zeros((ny_mid,1)).reshape(ny_mid,1)
        for j in xrange(0,ny_mid):
          xloc        = j * dy1
          y_grid[j,0] = gety(r_mid, Ly_tgt, xloc)

        Dy_vec = np.zeros((ny_mid,1)).reshape(ny_mid,1)
        for j in xrange(0,ny_mid-1):
          Dy_vec[j,0] = y_grid[j+1,0]-y_grid[j,0]

        y1_plus_mid = y_grid[1,0]/delta_nu

        err_r = (y1_plus_mid-y1_plus_tgt)/y1_plus_tgt
        if (err_r > tol_r):
           rmin = r_mid
           r_mid = (rmax+r_mid)/2
        elif (err_r < tol_r):
           rmax = r_mid;
           r_mid = (rmin+r_mid)/2
        #else:
          # result is converged

      # Assign the solution to r
      r = r_mid

      # now evaluate if for this grid the criterion for dymax is satisfied
      for j in xrange(0,ny_mid):
        if (y_grid[j,0] < delta_tgt):
          Dy_edge_over_delta_ref_mid = Dy_vec[j,0] / delta_ref_resolution
        else:
          break

      Dy_max_over_delta_ref_mid = (max(Dy_vec) / delta_star)[0]
      err_ny = (Dy_edge_over_delta_ref_mid-Dy_edge_over_delta_ref_tgt)/Dy_edge_over_delta_ref_mid
      # if the dy at edge is larger than the target,
      # increase the number of points,
      # decrease otherwise
      if (err_ny > tol_ny):
         nymin = ny_mid
         ny_mid = (nymax+ny_mid)/2
      elif (err_ny < tol_ny):
         nymax = ny_mid
         ny_mid = (nymin+ny_mid)/2
      #else:
        # result is converged

    # Assign grid parameters when converged
    Ny_tgt  = ny_mid
    htdelta = r_mid

    # Compute target run parameters
    Nx_tgt = Lx_tgt / Dx_tgt
    Nz_tgt = Lz_tgt / Dz_tgt
    Np     = Nx_tgt * Ny_tgt * Nz_tgt
    turnover_tgt = turnover_time * delta_factor

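    # Cost model: CPUh_performance is CPU-hours per 10^6 grid points per 10^3 time steps
    # (cf. the 'CPUh/Np(10^6)/Nt(10^3)' print below), hence the division by 1.0e9.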
    Nt_turntime       = 0
    CPUh_turnover     = 0
    Nt_turntime_tgt   = 0
    CPUh_turnover_tgt = 0
    if dt != 0:
        Nt_turntime_tgt   = turnover_tgt / dt
        CPUh_turnover_tgt = CPUh_performance * Np * Nt_turntime_tgt / 1.0e9

    dt_sample         = turnover_tgt / samples_per_turnover
    nt_sample         = 0
    if dt != 0:
        nt_sample     = dt_sample / dt

    # Output run parameters
    print
    print 'Run target parameters'
    print 'delta_factor                = ', delta_factor
    print 'turnover_tgt                = ', turnover_tgt
    print 'Nt_turnover_tgt             = ', Nt_turntime_tgt
    print 'Lx_over_delta_tgt           = ', Lx_over_delta_tgt
    print 'Ly_over_delta_tgt           = ', Ly_over_delta_tgt
    print 'Lz_over_delta_tgt           = ', Lz_over_delta_tgt
    print 'Dx_over_delta_nu_tgt        = ', Dx_over_delta_nu_tgt
    print 'Dz_over_delta_nu_tgt        = ', Dz_over_delta_nu_tgt
    print 'y1_plus_tgt                 = ', y1_plus_tgt
    if (use_theta_reference):
        print 'Reference outer thickness   = ', 'theta'
    else:
        print 'Reference outer thickness   = ', 'delta_star'
    print 'Dy_edge_over_delta_ref_tgt  = ', Dy_edge_over_delta_ref_tgt
    print
    print 'Run setup and cost parameters'
    print 'htdelta                     = ', htdelta
    print 'Lx_tgt                      = ', Lx_tgt
    print 'Ly_tgt                      = ', Ly_tgt
    print 'Lz_tgt                      = ', Lz_tgt
    print 'Nx_tgt                      = ', Nx_tgt
    print 'Ny_tgt                      = ', Ny_tgt
    print 'Nz_tgt                      = ', Nz_tgt
    print 'y1_plus                     = ', y1_plus_mid
    print 'Dy_edge_over_delta_ref      = ', Dy_edge_over_delta_ref_mid
    print 'Dy_max_over_delta_ref       = ', Dy_max_over_delta_ref_mid
    print 'Np_million                  = ', Np / 1.0e6
    print 'CPUh/Np(10^6)/Nt(10^3)      = ', CPUh_performance
    print 'CPUh/turnover               = ', CPUh_turnover_tgt
    print 'samples_per_turnover        = ', samples_per_turnover
    print 'dt_sample                   = ', dt_sample
    print 'nt_sample                   = ', nt_sample