Example #1
0
def eddyMaps(DI):
    """Extract eddy event records from runtime files and write per-realization
    eddy-map data files plus a gnuplot script that plots them.

    Parameters
    ----------
    DI : dict
        Case dictionary; uses DI['cdir'] (case dir, for runtime files) and
        DI['pdir'] (post dir, for output).

    Output: DI['pdir']/eddyMaps/plotEddyMaps.gnu and eddy_map_#####.dat files
    with columns (eddy position, time, eddy size / 2).
    """

    nToDo = 1  # number of realizations to process
    nRlz = get_nRlz(DI)
    x0, xL = get_domainBounds(DI)

    # makedirs(..., exist_ok=True) avoids the check-then-create race of the
    # previous os.path.exists + os.mkdir pair
    os.makedirs(DI['pdir'] + "eddyMaps", exist_ok=True)

    flist = glob.glob(DI['cdir'] + "runtime/runtime*")

    # 'with' guarantees the gnuplot script is closed even if an error occurs
    with open(DI['pdir'] + "eddyMaps/plotEddyMaps.gnu", 'w') as gpfile:
        gpfile.write("set bar 0\n")
        gpfile.write("set xlabel('Position (m)')\n")
        gpfile.write("set ylabel('Time or space (s or m)')\n")
        gpfile.write("set xrange [%g:%g]\n" % (x0, xL))

        for ii, filename in enumerate(flist[0:nToDo]):

            print(
                "eddyMaps: writing for realization %i of %i (with %i total possible)"
                % (ii + 1, nToDo, nRlz))

            gpfile.write("plot 'eddy_map_" + "{0:0>5}".format(ii) +
                         ".dat' us 1:2:3 with xerrorbars ti 'R_%i'; pause -1\n" %
                         (ii))

            # grab every line beginning with a (space-padded) digit: the eddy
            # event records.  NOTE: sp.getoutput is shell-based; a Popen/grep
            # pipe is an alternative if this misbehaves on some platforms.
            lines = sp.getoutput("grep -h '^ *[1-9]' " + filename).split('\n')

            # skip when grep matched nothing (output is then a single empty
            # string, so len == 1).  NOTE(review): '> 1' also skips files with
            # exactly one eddy record -- confirm that is intended.
            if len(lines) > 1:
                x = np.empty((len(lines), len(lines[0].split())))
                for i, line in enumerate(lines):
                    # np.fromstring(text, sep=...) is deprecated; parse via split
                    x[i, :] = np.array(line.split(), dtype=np.float64)

                x = x[:, (6, 1, 5)]  # edPos, time, edSize
                x[:, 2] = x[:, 2] / 2.0  # fix "error bar" for gnuplot plotting

                fname = DI['pdir'] + "eddyMaps/eddy_map_" + "{0:0>5}".format(
                    ii) + ".dat"
                np.savetxt(fname, x, fmt="%12.5e", comments=commentHdr)
Example #2
0
def checkForBlowout(DI):
    """Scan the final dump time of every realization for extinction/blowout.

    A realization whose peak temperature at the last dump time is below the
    extinction threshold is reported as blown out.  If any are found, the user
    is prompted whether to abort via sys.exit().

    DI : case dictionary; uses DI['cdir'] for data discovery.
    """

    dataFiles = glob.glob(DI['cdir'] + "data_py/data_*.npy")
    ntimes = len(dataFiles)
    nrlz = get_nRlz(DI)
    varNames = get_dataHeaderVars(DI)
    times = get_inputFileParameter(
        DI, ("dumpTimes", ))  # times or ypositions if spatial

    # Blowout -----------------------------------------------------------------------------------

    extinction_temp = 1800.0  # below this peak T, a realization is extinguished
    itemp = varNames.index("temp")

    print("Checking for extinguished/blowout cases.")

    nBlowout = 0
    for irlz in range(nrlz):
        # only the final dump time matters for the blowout check
        rlzData = get_data_realization(DI, ntimes - 1, irlz)
        peakT = np.amax(rlzData[:, itemp])
        if peakT < extinction_temp:
            print("BLOWOUT: rlz " + str(irlz))
            nBlowout += 1

    # guard clause: nothing blew out, report and finish
    if nBlowout == 0:
        print("No blowout cases.")
        return

    answer = input(
        "WARNING: Blowout in %i realizations. Process at your own risk. Quit? (y/n) "
        % (nBlowout))
    if answer == "y":
        sys.exit()
Example #3
0
def basic_stats(DI, nfbins=60, nx=-1, favre=False, do_yt=False, filter=True):
    """Compute mean and rms profiles of every variable (plus the TKE
    dissipation rate and its log) on a uniform grid, optionally conditional
    means on mixture fraction, apply a moving-average filter sized from the
    jet width, and write means_*/sig_* (and cmeans_*/csig_*) .dat files to
    DI['pdir'].

    Parameters
    ----------
    DI : dict
        Case dictionary with 'cdir' (case dir) and 'pdir' (post dir).
    nfbins : int
        Number of mixture-fraction bins for conditional statistics.
    nx : int
        Number of uniform grid points; if negative, derived from dxmin.
    favre : bool
        Density-weighted averaging.  NOTE: forcibly disabled below.
    do_yt : bool
        Also write ytu.dat (y/t/u mapping for spatial vs temporal cases).
    filter : bool
        Apply a moving-average filter whose window is 10% of the jet width
        at each dump time.  (Name shadows the builtin; kept for callers.)
    """

    favre = False  # NOTE(review): hard override by the author -- the favre
                   # argument is currently ignored; confirm before relying on it

    #----- gather case parameters ---------------------------------------------

    dataFiles = glob.glob(DI['cdir'] + "data_py/data_*.npy")
    ntimes = len(dataFiles)
    nrlz = get_nRlz(DI)
    varNames = get_dataHeaderVars(DI)
    times = get_inputFileParameter(
        DI, ("dumpTimes", ))  # times or ypositions if spatial
    times = times[0:ntimes]  # limit to number of dataFiles, not all dumpTimes
    nvar = len(varNames)
    cCoord = get_inputFileParameter(DI, ("params", "cCoord"))
    df = 1.0 / nfbins
    dxmin = get_inputFileParameter(DI, ("params", "dxmin"))
    nx = int(1.0 / dxmin / 10) if nx < 0 else nx
    x0, xL = get_domainBounds(DI)
    L = get_inputFileParameter(DI, ("params", "domainLength"))
    Dx = L / nx  # grid spacing used to size the filter window (pre odd-adjust)
    coflow = get_inputFileParameter(DI, ("initParams", "vel_min"))  # NOTE(review): unused

    if nx % 2 == 0:
        nx = nx + 1  # make it odd for centerline

    #----- locate variable columns (optional ones are -1 when absent) ---------

    def _col(name):
        """Column index of 'name' in varNames, or -1 if absent."""
        try:
            return varNames.index(name)
        except ValueError:
            return -1

    imixf = _col("mixf")
    irho = _col("rho")
    idvisc = _col("dvisc")
    iposf = _col("posf")
    if iposf == -1:
        raise ValueError("In basic_stats: no posf variable found")
    ipos = _col("pos")
    if ipos == -1:
        raise ValueError("In basic_stats: no pos variable found")
    iuvel = _col("uvel")

    doConditional = imixf > -1

    if irho == -1 and favre:
        raise ValueError(
            "In basic_stats: favre is true, but there is no rho in data file")

    if do_yt and (irho == -1 or iuvel == -1):
        raise ValueError(
            "In basic_stats: do_yt is true but missing rho or uvel in data file"
        )

    #----- allocate accumulators ----------------------------------------------

    X = np.linspace(x0, xL, nx)
    fbins = np.linspace(df / 2.0, 1.0 - df / 2.0, nfbins)

    # two extra slots hold the TKE dissipation rate and its logarithm
    means = np.zeros((ntimes, nvar + 2, nx))
    mean2 = np.zeros((ntimes, nvar + 2, nx))
    rhoM = np.zeros((ntimes, nvar + 2, nx))  # needed for favre to normalize
    cmeans = np.zeros((ntimes, nvar, nfbins))
    cmean2 = np.zeros((ntimes, nvar, nfbins))
    binNrm = np.zeros((ntimes, nvar, nfbins))  # to normalize conditional means
    rhouu = np.zeros(ntimes)
    rhou = np.zeros(ntimes)

    #----- accumulate over dump times and realizations ------------------------

    for itime in range(ntimes):

        # read and hold all realizations for this dump time at once rather
        # than re-reading the .npy per realization via get_data_realization()
        fname = DI['cdir'] + "data_py/data_py_" + "{0:0>5}".format(
            itime) + ".npy"
        data_all = np.load(fname)
        posf_all = data_all[:, 1]
        dx_all = posf_all[1:] - posf_all[0:-1]

        # realization boundaries: a negative face spacing marks the jump from
        # the end of one realization to the start of the next
        istarts = np.insert(np.where(dx_all < 0.0)[0] + 1, 0, 0)
        iends = np.append(np.where(dx_all < 0.0)[0], len(posf_all) - 1)

        nrlz = len(istarts)  # some sims end early; recompute nrlz per time

        for irlz in range(nrlz):

            print(
                "basic_stats: Processing time # %i of %i; for realization %i of %i"
                % (itime + 1, ntimes, irlz + 1, nrlz))

            data = data_all[istarts[irlz]:iends[irlz] + 1, :]
            x = data[:, ipos]
            xf = data[:, iposf]
            xf = np.append(xf, x[-1] + (xf[-1] - x[-1]))  # close last face

            if doConditional:
                # cell weights |xf^cCoord| differences; cells straddling the
                # origin (sign change across the face pair) handled separately
                dx = np.abs(np.abs(xf[1:])**cCoord - np.abs(xf[0:-1])**cCoord)
                i = np.where(xf[1:] * xf[0:-1] < 0)[0]
                dx[i] = np.abs(
                    np.abs(xf[i + 1])**cCoord + np.abs(xf[i])**cCoord)
                ibin = np.clip((data[:, imixf] / df).astype(int), 0,
                               nfbins - 1)
                wt = dx * data[:, irho] if favre else dx.copy()

            if favre:
                rho = extrap1d(x, data[:, irho])(X)
                rhoM[itime, :, :] = rhoM[itime, :, :] + rho

            for ivar in range(nvar):

                y = data[:, ivar]
                Y = extrap1d(x, y)(X)  # interpolate onto the uniform grid
                Y2 = Y**2

                means[itime, ivar, :] += Y * rho if favre else Y
                mean2[itime, ivar, :] += Y2 * rho if favre else Y2

                if doConditional:
                    # FIX: np.add.at accumulates repeated bin indices; the
                    # previous fancy-index assignment silently dropped all but
                    # one contribution per duplicated bin
                    np.add.at(cmeans[itime, ivar], ibin, y * wt)
                    np.add.at(cmean2[itime, ivar], ibin, y * y * wt)
                    np.add.at(binNrm[itime, ivar], ibin, wt)

            # turbulent dissipation rate, dvisc*(dU/dx)^2, on cell centers
            # NOTE(review): assumes uvel and dvisc columns exist -- iuvel or
            # idvisc of -1 would silently read the last column; confirm inputs
            dx = (x[1:] - x[0:-1])
            dU = (data[1:, iuvel] - data[0:-1, iuvel])
            dVisc = 0.5 * (data[1:, idvisc] + data[0:-1, idvisc])
            i = np.where(dx < 0)[0]  # crossing realizations
            dx[i] = dx[i - 1]  # reuse previous spacing at the crossing
            diss = dVisc * (dU / dx)**2.0
            diss = np.append(diss, 0.0)  # pad to cell count for interpolation
            yDiss = extrap1d(x, diss)(X)
            yDiss2 = yDiss**2.0

            means[itime, nvar, :] += yDiss * rho if favre else yDiss
            mean2[itime, nvar, :] += yDiss2 * rho if favre else yDiss2
            logDiss = np.log(np.maximum(yDiss, 1e-20))  # floor avoids log(0)
            logDiss2 = logDiss**2.0
            means[itime, nvar + 1, :] += logDiss * rho if favre else logDiss
            mean2[itime, nvar + 1, :] += logDiss2 * rho if favre else logDiss2

            if do_yt:
                rho = extrap1d(x, data[:, irho])(X)
                uvel = extrap1d(x, data[:, iuvel])(X)
                rhouu[itime] += np.sum(rho * uvel * uvel)
                rhou[itime] += np.sum(rho * uvel)

        if favre:
            # FIX: divide by this time's rhoM slice; the original divided the
            # 2-D slice by the full 3-D rhoM array, which cannot broadcast
            # in place and would raise
            means[itime, :, :] /= rhoM[itime, :, :]
            mean2[itime, :, :] /= rhoM[itime, :, :]
        else:
            means[itime, :, :] /= nrlz
            mean2[itime, :, :] /= nrlz

    sig2 = mean2 - means * means
    sig = np.sqrt(np.abs(sig2))  # abs guards tiny negative round-off

    if doConditional:
        cmeans = cmeans / binNrm
        cmean2 = cmean2 / binNrm
        cmeans[np.where(binNrm == 0)] = 0  # empty bins: mean set to zero

        csig2 = cmean2 - cmeans * cmeans
        csig = np.sqrt(np.abs(csig2))
        csig[np.where(binNrm == 0)] = 0

    #----- optional y/t/u mapping ---------------------------------------------

    if do_yt:
        uavg = rhouu / rhou  # density-weighted mean velocity per dump time
        uavg_mid = 0.5 * (uavg[1:] + uavg[0:-1])
        Lspatial = get_inputFileParameter(DI, ("params", "Lspatial"))
        if Lspatial:  # dumpTimes are downstream positions; integrate for time
            print("setting ytu.dat for spatial case")
            ypos = np.array(times[0:ntimes])
            dy = ypos[1:] - ypos[0:-1]
            tpos = np.insert(np.cumsum(dy / uavg_mid), 0, 0.0)
        else:  # temporal: dumpTimes are times; integrate for position
            print("setting ytu.dat for temporal case")
            tpos = np.array(times[0:ntimes])
            dt = tpos[1:] - tpos[0:-1]
            ypos = np.insert(np.cumsum(dt * uavg_mid), 0, 0.0)
        ytu = np.vstack([ypos, tpos, uavg]).T
        np.savetxt(DI['pdir'] + 'ytu.dat',
                   ytu,
                   header=" y(m) t(s) u(m/s)",
                   fmt="%15.8e ",
                   comments=commentHdr)

    #----- jet width data for the moving-average filter -----------------------
    # width comes from the fit line for the coldJet_base_gDens120 case, but a
    # fit from experimental data works too, as long as the downstream
    # distances match
    if filter:
        wfile = '/home/abaumga/odt2.0/post/coldJet/base_width.dat'  # NOTE(review): hard-coded path
        #wfile = DI['pdir']+"fwhm_cl_uvel.dat"  #format same as fwhm_cl_uvel.dat
        jetWidth = np.loadtxt(wfile, comments=commentHdr)
        D = get_inputFileParameter(DI, ("initParams", "djeti"))
        width = np.zeros(ntimes)
        for itime in range(ntimes):
            # NOTE(review): assumes jetWidth rows align 1:1 with dump times
            width[itime] = (jetWidth[itime, 1]) * D

    #----- write the mean and std data ----------------------------------------

    head = "x_(m)           "
    for i, time in enumerate(times):
        hi = "loc_" + str(i + 2) + "_" + str(time)
        hi = hi + (22 - len(hi)) * " "
        head = head + hi

    if filter:
        # window size is 10% of the unfiltered fwhm value at that dumptime,
        # measured in grid cells
        wndws = np.zeros(ntimes)
        for itime in range(ntimes):
            wndws[itime] = (0.1 * width[itime]) / Dx

    # FIX: copy so the moving average reads the ORIGINAL profiles; the
    # original code aliased mav_means to means and mav_sig to sig, so later
    # windows averaged already-filtered values
    mav_means = means.copy()
    mav_sig = sig.copy()
    if filter:
        for i in range(nvar + 2):  # nvar+2 so TKEdiss/logTKEdiss filtered too
            for j in range(ntimes):
                half = int(int(wndws[j]) / 2)
                # NOTE(review): if the window rounds to < 2 cells the slice is
                # empty and np.mean yields NaN -- confirm widths exceed 10*Dx
                for k in range(nx):
                    if k >= half:
                        mav_means[j, i, k] = np.mean(
                            means[j, i, k - half:k + half])
                        mav_sig[j, i, k] = np.mean(
                            sig[j, i, k - half:k + half])

    def _save_profile(slab, fname, fmt=None):
        """Write one (ntimes, nx) slab as columns [X, profile per dump time]."""
        var = np.vstack([X, np.reshape(slab, (ntimes, nx))]).T
        if fmt is None:  # sig_* files historically use the default format
            np.savetxt(fname, var, header=head, comments=commentHdr)
        else:
            np.savetxt(fname, var, header=head, fmt=fmt, comments=commentHdr)

    for i in range(nvar):
        _save_profile(mav_means[:, i, :],
                      DI['pdir'] + "means_" + varNames[i] + ".dat", "%15.8e ")

    for i in range(nvar):
        _save_profile(mav_sig[:, i, :],
                      DI['pdir'] + "sig_" + varNames[i] + ".dat")

    _save_profile(mav_means[:, nvar, :], DI['pdir'] + "means_TKEdiss.dat",
                  "%15.8e ")
    _save_profile(mav_means[:, nvar + 1, :],
                  DI['pdir'] + "means_logTKEdiss.dat", "%15.8e ")
    _save_profile(mav_sig[:, nvar, :], DI['pdir'] + "sig_TKEdiss.dat",
                  "%15.8e ")
    _save_profile(mav_sig[:, nvar + 1, :], DI['pdir'] + "sig_logTKEdiss.dat",
                  "%15.8e ")

    #----- write the conditional mean and std data ----------------------------

    if doConditional:

        head = "x_(m)           "
        for i, time in enumerate(times):
            hi = "loc_" + str(i + 2) + "_" + str(time)
            hi = hi + (22 - len(hi)) * " "
            head = head + hi

        for i in range(nvar):
            var = np.vstack(
                [fbins, np.reshape(cmeans[:, i, :], (ntimes, nfbins))]).T
            np.savetxt(DI['pdir'] + "cmeans_" + varNames[i] + ".dat",
                       var,
                       header=head,
                       fmt="%15.8e ",
                       comments=commentHdr)

        for i in range(nvar):
            var = np.vstack(
                [fbins, np.reshape(csig[:, i, :], (ntimes, nfbins))]).T
            np.savetxt(DI['pdir'] + "csig_" + varNames[i] + ".dat",
                       var,
                       header=head,
                       comments=commentHdr)
Example #4
0
def uvelScatter(DI):  #uses one case, NOT a list of cases
    """Scatter-plot streamwise velocity vs. position for a random sample of
    10000 cells at one dump time (data_py_00010) and save the figure under
    ../../data/plots_coldJet/.

    DI : case dictionary; uses 'cdir' for data and 'cn' (case name) for the
    plot title and file name.
    """

    dataFiles = glob.glob(DI['cdir'] + "data_py/data_*.npy")
    ntimes = len(dataFiles)
    nrlz = get_nRlz(DI)
    varNames = get_dataHeaderVars(DI)
    times = get_inputFileParameter(
        DI, ("dumpTimes", ))  # times or ypositions if spatial
    times = times[
        0:
        ntimes]  # limit times to number of dataFiles, not all listed dumpTimes
    nvar = len(varNames)

    matplotlib.rcParams.update({
        'font.size': 20,
        'figure.autolayout': True
    })  #, 'font.weight':'bold'})

    #comment or uncomment the following sections for either 4 plots or 1 plot (do not have both uncommented)

    ####FOR 4 SIDE-BY-SIDE PLOTS FOR 4 DUMPTIMES####

    #    fig,axL=plt.subplots(2,2)
    #    dmps=['00005','00010','00015','00020']
    #    for dmp in range(len(dmps)):

    #        fname = DI['cdir']+'data_py/data_py_'+dmps[dmp]+'.npy'
    #        data_all = np.load(fname)
    #        pos_all = data_all[:,0]
    #        uvel_all = data_all[:,4]
    #        iplt = np.random.choice(pos_all.shape[0],10000,replace=False)

    #        ax = axL[dmp//2, dmp%2]
    #        ax.scatter(pos_all[iplt],uvel_all[iplt],s=0.5,c='b')
    #        ax.set_title(dmps[dmp])
    #    for ax in axL.flat:
    #        ax.set(xlabel='pos', ylabel='uvel')
    #    for ax in axL.flat:
    #        ax.label_outer()
    #    plt.savefig(DI['pdir']+'uvelScatter'.replace(",","o"))

    ####FOR 1 PLOT FOR 1 DUPMTIME####

    fig, axL = plt.subplots()

    fname = DI['cdir'] + 'data_py/data_py_00010.npy'  #change dmp file name here
    data_all = np.load(fname)
    pos_all = data_all[:, 0]  # column 0: cell position
    uvel_all = data_all[:, 4]  # column 4: streamwise velocity
    iplt = np.random.choice(pos_all.shape[0], 10000, replace=False)

    # FIX(perf): one vectorized scatter call instead of 10000 one-point calls
    # inside a Python loop; the rendered plot is the same but far faster
    axL.scatter(pos_all[iplt], uvel_all[iplt], s=0.5, c='b')
    axL.set_ylabel('uvel')
    axL.set_xlabel('pos')
    axL.set_title(DI['cn'][8:])
    plt.savefig('../../data/plots_coldJet/uvelScatter_00010' +
                DI['cn'][8:].replace(",", "o"))  #change dmp file name here
Example #5
0
def get_pdfs(DI, favre=False, nbins=60):
    """Compute (optionally density-weighted) PDFs of u, v, w velocity, the TKE
    dissipation rate and its log10, and log10 of cell size dx, combining all
    realizations at each dump time, and write pdfs_*.dat files to DI['pdir'].

    DI    : case dictionary; uses DI['cdir'] (data) and DI['pdir'] (output).
    favre : density-weight the PDFs (requires a 'rho' column in the data).
    nbins : number of histogram bins for every PDF.
    """

    #--------------------------------------------------------------------------------------------
    
    dataFiles = glob.glob(DI['cdir']+"data_py/data_*.npy")
    ntimes    = len(dataFiles)
    nrlz      = get_nRlz(DI)
    varNames  = get_dataHeaderVars(DI)
    times     = get_inputFileParameter(DI, ("dumpTimes",))         # times or ypositions if spatial
    times     = times[0:ntimes]    # limit times to number of dataFiles, not all listed dumpTimes
    nvar      = len(varNames)
    cCoord    = get_inputFileParameter(DI, ("params", "cCoord"))
    dxmin     = get_inputFileParameter(DI, ("params", "dxmin"))
    dxmax     = get_inputFileParameter(DI, ("params", "dxmax"))
    L         = get_inputFileParameter(DI, ("params", "domainLength"))
    umax      =  get_inputFileParameter(DI, ("initParams", "vel_max"))
    umin      =  get_inputFileParameter(DI, ("initParams", "vel_min"))
    # v and w fluctuate about zero: bin them over +/- 5% of the u range
    vmin      = -0.05*np.abs(umax-umin)
    vmax      = 0.05*np.abs(umax-umin)
    wmin      = -0.05*np.abs(umax-umin)
    wmax      = 0.05*np.abs(umax-umin)
    x0, xL    = get_domainBounds(DI) 

    # dxmin/dxmax are stored normalized by the domain length
    dxmin *= L
    dxmax *= L
    
    # column indices; required columns raise, optional ones default to -1
    try :
        iposf = varNames.index("posf")
    except :
        raise ValueError("In basic_stats: no posf variable found")
    try :
        ipos = varNames.index("pos")
    except :
        raise ValueError("In basic_stats: no pos variable found")
    try :
        irho = varNames.index("rho")
    except :
        irho = -1
    try :
        idvisc = varNames.index("dvisc")
    except :
        idvisc = -1
    try :
        imixf = varNames.index("mixf")
    except :
        imixf = -1
    try :
        iuvel = varNames.index("uvel")
    except :
        iuvel = -1
    try :
        ivvel = varNames.index("vvel")
    except :
        ivvel = -1
    try :
        iwvel = varNames.index("wvel")
    except :
        iwvel = -1

    if irho == -1 and favre :
        raise ValueError("In basic_stats: favre is true, but there is no rho in data file")

    dxpdfs = np.zeros((nbins, ntimes))

    # one PDF column per dump time
    P_uvel = np.zeros([nbins,ntimes])
    P_vvel = np.zeros([nbins,ntimes])
    P_wvel   = np.zeros([nbins,ntimes])
    P_diss = np.zeros([nbins,ntimes])
    P_logDiss = np.zeros([nbins,ntimes])
    P_diffU  =  np.zeros([nbins,ntimes])
    #P_logDiffUpos = np.zeros([nbins,ntimes])
    #P_logDiffUneg = np.zeros([nbins,ntimes])

    dumpTimesString = ''

    #--------------------------------------------------------------------------------------------
    
    for itime in range(ntimes) :
        
        dx = np.empty(0)

        # header label for this dump time's PDF column
        dumpTimesString = dumpTimesString + '"P(t='+'{:.2e}'.format(times[itime])+' s)" '
        fname = DI['cdir']+"data_py/data_py_" + "{0:0>5}".format(itime) + ".npy"
        data = np.load(fname)

        print("Processing time # %i of %i" %(itime+1, ntimes))
    
        x  = data[:,ipos]
        xf = data[:,iposf]
        xf = np.append(xf,x[-1]+(xf[-1]-x[-1]))    # close the last cell face
        rho = data[:,irho]
        uvel = data[:,iuvel]
        vvel = data[:,ivvel]
        wvel = data[:,iwvel]
        dvisc = data[:,idvisc]

        # cell weights: |xf^cCoord| differences; cells straddling the origin
        # (face sign change) are handled separately below
        wt1 = np.abs(np.abs(xf[1:])**cCoord - np.abs(xf[0:-1])**cCoord)
        i = np.where(xf[1:] * xf[0:-1] < 0)[0]
        wt1[i] = np.abs(np.abs(xf[i+1])**cCoord + np.abs(xf[i])**cCoord) 
        # the crossing between realizations is computed correctly, so we don't need to mask end points
        #maskCrossing = (xf[1:] > xf[0:-1])
        wt = ( wt1*rho if favre else wt1 ) #  don't need this: (wt1*rho if favre else wt1)*maskCrossing
        
        #--------------
        # only count cells noticeably above the coflow velocity (umin)
        j = np.where(  ( uvel - umin ) > 0.0001*(umax-umin) )
        P_uvel[:,itime] , uvel_bins = compute_wpdf(uvel[j], wt[j], umin, umax, nbins)
        P_vvel[:,itime] , vvel_bins = compute_wpdf(vvel[j], wt[j], vmin, vmax, nbins)
        P_wvel[:,itime] , wvel_bins = compute_wpdf(wvel[j], wt[j], wmin, wmax, nbins)

        # dissipation computation
        uvel  = np.append(uvel,uvel[-1])  # get same number of entries as dx
        dvisc = np.append(dvisc,dvisc[-1]) 
        dx = (xf[1:] - xf[0:-1])
        du = ( uvel[1:] - uvel[0:-1] )
        dvisc = 0.5 * ( dvisc[1:] + dvisc[0:-1] )
        diss = dvisc * ( du / dx )**2.0 # uses dx from cell centers and averaged dvisc
        #don't compute dissipation at crossing and don't count points with negligible dissipation
        j = np.where( np.logical_and( dx > 0.0 , diss > 10**(-6) ) )
        P_diss[:,itime] , diss_bins = compute_wpdf(diss[j], wt[j], 0, 1000, nbins)
        P_logDiss[:,itime] , logDiss_bins = compute_wpdf(np.log10(diss[j]), wt[j], -6, 8, nbins)
        #--------------

        # NOTE(review): this dx PDF reuses the dissipation filter j, so cells
        # at realization crossings or with negligible dissipation are excluded
        # from the cell-size PDF -- confirm that is intended
        #dxpdfs[:,itime], bins = compute_pdf(dx, dxmin, dxmax, nbins)
        dxpdfs[:,itime], bins = compute_pdf(np.log10(dx[j]), np.log10(dxmin), np.log10(dxmax), nbins)

        # NOTE(review): bdx is rebuilt each iteration but never used in this
        # function -- appears to be leftover from get_dxpdfs
        bdx = np.vstack([bins,dxpdfs.T]).T

    #--------------

#    head = " log(dx)[m]     "  + dumpTimesString
#    for i,time in enumerate(times) :
#        hi = "time_" + str(i+2) + "_" + str(time) 
#        hi = hi + (22-len(hi))*" "
#        head = head + hi

    # write each PDF family: first column is bin centers, then one column per dump time
    var = np.vstack([uvel_bins,P_uvel.T]).T
    head = " u[m/s]   "  + dumpTimesString
    np.savetxt(DI['pdir']+'pdfs_uvel.dat', var, header=head, fmt="%15.8e ", comments=commentHdr)

    var = np.vstack([vvel_bins,P_vvel.T]).T
    head = " v[m/s]   "  + dumpTimesString
    np.savetxt(DI['pdir']+'pdfs_vvel.dat', var, header=head, fmt="%15.8e ", comments=commentHdr)

    var = np.vstack([wvel_bins,P_wvel.T]).T
    head = " w[m/s]   "  + dumpTimesString
    np.savetxt(DI['pdir']+'pdfs_wvel.dat', var, header=head, fmt="%15.8e ", comments=commentHdr)

    var = np.vstack([diss_bins,P_diss.T]).T
    head = " TKEdiss   "  + dumpTimesString
    np.savetxt(DI['pdir']+'pdfs_TKEdiss.dat', var, header=head, fmt="%15.8e ", comments=commentHdr)

    var = np.vstack([logDiss_bins,P_logDiss.T]).T
    head = " log10(TKEdiss)   "  + dumpTimesString
    np.savetxt(DI['pdir']+'pdfs_logTKEdiss.dat', var, header=head, fmt="%15.8e ", comments=commentHdr)
def get_dxpdfs(DI, nbins=60):
    """Compute the PDF of log10 cell size (dx) over all realizations at each
    dump time and write it to dxpdfs.dat in DI['pdir'].

    DI    : case dictionary; uses DI['cdir'] (data) and DI['pdir'] (output).
    nbins : number of histogram bins.
    """

    #--------------------------------------------------------------------------------------------
    
    dataFiles = glob.glob(DI['cdir']+"data_py/data_*.npy")
    ntimes    = len(dataFiles)
    nrlz      = get_nRlz(DI)
    varNames  = get_dataHeaderVars(DI)
    times     = get_inputFileParameter(DI, ("dumpTimes",))         # times or ypositions if spatial
    times     = times[0:ntimes]    # limit times to number of dataFiles, not all listed dumpTimes
    nvar      = len(varNames)
    dxmin     = get_inputFileParameter(DI, ("params", "dxmin"))
    dxmax     = get_inputFileParameter(DI, ("params", "dxmax"))
    L         = get_inputFileParameter(DI, ("params", "domainLength"))
    x0, xL    = get_domainBounds(DI) 

    # dxmin/dxmax are stored normalized by the domain length
    dxmin *= L
    dxmax *= L
    
    try :
        iposf = varNames.index("posf")
    except :
        raise ValueError("In basic_stats: no posf variable found")
    try :
        ipos = varNames.index("pos")
    except :
        raise ValueError("In basic_stats: no pos variable found")

    dxpdfs = np.zeros((nbins, ntimes))

    #--------------------------------------------------------------------------------------------
    
    for itime in range(ntimes) :
        
        # accumulate dx values over all realizations at this dump time
        dx = np.empty(0)

        fname = DI['cdir']+"data_py/data_py_" + "{0:0>5}".format(itime) + ".npy"
        data_all = np.load(fname)
        posf_all   = data_all[:,1]
        dx_all = posf_all[1:]-posf_all[0:-1]

        # realization boundaries: a negative face spacing marks the jump from
        # the end of one realization to the start of the next
        istarts = np.where(dx_all <0.0)[0] + 1
        istarts = np.insert(istarts, 0, 0.0)

        iends = np.where(dx_all < 0.0)[0]
        iends = np.append(iends, len(posf_all)-1)
        
        nrlz = len(istarts)  # some sims end early so compute nrlz for each time

        for irlz in range(nrlz) : 
    
            print("Processing time # %i of %i; for realization %i of %i" %(itime+1, ntimes, irlz+1, nrlz))
    
            #data = get_data_realization(DI, itime, irlz)
            i_s = istarts[irlz]
            i_e = iends[irlz]

            data = data_all[i_s:i_e+1, :]
            x  = data[:,ipos]
            xf = data[:,iposf]
            xf = np.append(xf,x[-1]+(xf[-1]-x[-1]))       # close the last cell face
            dx = np.append(dx, (xf[1:] - xf[0:-1]))

        #--------------

        #dxpdfs[:,itime], bins = compute_pdf(dx, dxmin, dxmax, nbins)
        dxpdfs[:,itime], bins = compute_pdf(np.log10(dx), np.log10(dxmin), np.log10(dxmax), nbins)

        bdx = np.vstack([bins,dxpdfs.T]).T

    #--------------

    # first column is log10(dx) bin centers, then one PDF column per dump time
    head = " log(dx)[m]     "
    for i,time in enumerate(times) :
        hi = "loc_" + str(i+2) + "_" + str(time) 
        hi = hi + (22-len(hi))*" "
        head = head + hi
    np.savetxt(DI['pdir']+'dxpdfs.dat', bdx, header=head, fmt="%15.8e ", comments=commentHdr)
Example #7
0
def basic_stats(DI, nfbins=60, nx=-1, favre=False, do_yt=False):
    """Compute mean and rms profiles of every variable on a uniform grid (and
    conditional means on mixture fraction, when present) over all dump times
    and realizations, writing means_*/sig_* (and cmeans_*/csig_*) .dat files
    to DI['pdir'].

    Parameters
    ----------
    DI : dict
        Case dictionary with 'cdir' (case dir) and 'pdir' (post dir).
    nfbins : int
        Number of mixture-fraction bins for conditional statistics.
    nx : int
        Number of uniform grid points; if negative, derived from dxmin.
    favre : bool
        Density-weighted (Favre) averaging; requires a 'rho' column.
    do_yt : bool
        Also write ytu.dat (y/t/u mapping for spatial vs temporal cases).
    """

    #----- gather case parameters ---------------------------------------------

    dataFiles = glob.glob(DI['cdir'] + "data_py/data_*.npy")
    ntimes = len(dataFiles)
    nrlz = get_nRlz(DI)
    varNames = get_dataHeaderVars(DI)
    times = get_inputFileParameter(
        DI, ("dumpTimes", ))  # times or ypositions if spatial
    # FIX: limit times to the number of dataFiles (consistent with the other
    # post-processing routines) so headers and ytu.dat stay aligned with the
    # data when a run ended before all listed dumpTimes were written
    times = times[0:ntimes]
    nvar = len(varNames)
    cCoord = get_inputFileParameter(DI, ("params", "cCoord"))
    df = 1.0 / nfbins
    dxmin = get_inputFileParameter(DI, ("params", "dxmin"))
    nx = int(1.0 / dxmin / 10) if nx < 0 else nx
    x0, xL = get_domainBounds(DI)
    if nx % 2 == 0:
        nx = nx + 1  # make it odd for centerline

    #----- locate variable columns (optional ones are -1 when absent) ---------

    def _col(name):
        """Column index of 'name' in varNames, or -1 if absent."""
        try:
            return varNames.index(name)
        except ValueError:
            return -1

    imixf = _col("mixf")
    irho = _col("rho")
    iposf = _col("posf")
    if iposf == -1:
        raise ValueError("In basic_stats: no posf variable found")
    ipos = _col("pos")
    if ipos == -1:
        raise ValueError("In basic_stats: no pos variable found")
    iuvel = _col("uvel")

    doConditional = imixf > -1

    if irho == -1 and favre:
        raise ValueError(
            "In basic_stats: favre is true, but there is no rho in data file")

    if do_yt and (irho == -1 or iuvel == -1):
        raise ValueError(
            "In basic_stats: do_yt is true but missing rho or uvel in data file"
        )

    #----- allocate accumulators ----------------------------------------------

    X = np.linspace(x0, xL, nx)
    fbins = np.linspace(df / 2.0, 1.0 - df / 2.0, nfbins)

    means = np.zeros((ntimes, nvar, nx))
    mean2 = np.zeros((ntimes, nvar, nx))
    rhoM = np.zeros((ntimes, nx))  # needed for favre, to normalize
    cmeans = np.zeros((ntimes, nvar, nfbins))
    cmean2 = np.zeros((ntimes, nvar, nfbins))
    binNrm = np.zeros((ntimes, nvar, nfbins))  # to normalize conditional means
    rhouu = np.zeros(ntimes)
    rhou = np.zeros(ntimes)

    #----- accumulate over dump times and realizations ------------------------

    for itime in range(ntimes):
        for irlz in range(nrlz):

            print("Processing time # %i of %i; for realization %i of %i" %
                  (itime + 1, ntimes, irlz + 1, nrlz))

            data = get_data_realization(DI, itime, irlz)
            x = data[:, ipos]
            xf = data[:, iposf]
            xf = np.append(xf, x[-1] + (xf[-1] - x[-1]))  # close last face

            if doConditional:
                # cell weights |xf^cCoord| differences; cells straddling the
                # origin (face sign change) are handled separately
                dx = np.abs(np.abs(xf[1:])**cCoord - np.abs(xf[0:-1])**cCoord)
                i = np.where(xf[1:] * xf[0:-1] < 0)[0]
                dx[i] = np.abs(
                    np.abs(xf[i + 1])**cCoord + np.abs(xf[i])**cCoord)
                ibin = np.clip((data[:, imixf] / df).astype(int), 0,
                               nfbins - 1)
                wt = dx * data[:, irho] if favre else dx.copy()

            if favre:
                rho = extrap1d(x, data[:, irho])(X)
                rhoM[itime, :] = rhoM[itime, :] + rho

            for ivar in range(nvar):

                y = data[:, ivar]
                Y = extrap1d(x, y)(X)  # interpolate onto the uniform grid
                Y2 = Y**2.0

                means[itime, ivar, :] += Y * rho if favre else Y
                mean2[itime, ivar, :] += Y2 * rho if favre else Y2

                if doConditional:
                    # FIX: np.add.at accumulates repeated bin indices; the
                    # previous fancy-index assignment silently dropped all but
                    # one contribution per duplicated bin
                    np.add.at(cmeans[itime, ivar], ibin, y * wt)
                    np.add.at(cmean2[itime, ivar], ibin, y * y * wt)
                    np.add.at(binNrm[itime, ivar], ibin, wt)

            if do_yt:
                rho = extrap1d(x, data[:, irho])(X)
                uvel = extrap1d(x, data[:, iuvel])(X)
                rhouu[itime] += np.sum(rho * uvel * uvel)
                rhou[itime] += np.sum(rho * uvel)

    #----- normalize ----------------------------------------------------------

    if favre:
        # FIX: rhoM is (ntimes, nx) while means is (ntimes, nvar, nx); insert
        # a variable axis so the division broadcasts per variable (the
        # original bare division raised a shape-mismatch error)
        means /= rhoM[:, np.newaxis, :]
        mean2 /= rhoM[:, np.newaxis, :]
    else:
        means /= nrlz
        mean2 /= nrlz

    sig2 = mean2 - means * means
    sig = np.sqrt(np.abs(sig2))  # abs guards tiny negative round-off

    if doConditional:
        cmeans = cmeans / binNrm
        cmean2 = cmean2 / binNrm
        cmeans[np.where(binNrm == 0)] = 0  # empty bins: mean set to zero

        csig2 = cmean2 - cmeans * cmeans
        csig = np.sqrt(np.abs(csig2))
        csig[np.where(binNrm == 0)] = 0

    #----- optional y/t/u mapping ---------------------------------------------

    if do_yt:
        uavg = rhouu / rhou  # density-weighted mean velocity per dump time
        uavg_mid = 0.5 * (uavg[1:] + uavg[0:-1])
        Lspatial = get_inputFileParameter(DI, ("params", "Lspatial"))
        if Lspatial:  # dumpTimes are downstream positions; integrate for time
            print("setting ytu.dat for spatial case")
            ypos = np.array(times)
            dy = ypos[1:] - ypos[0:-1]
            tpos = np.insert(np.cumsum(dy / uavg_mid), 0, 0.0)
        else:  # temporal: dumpTimes are times; integrate for position
            print("setting ytu.dat for temporal case")
            tpos = np.array(times)
            dt = tpos[1:] - tpos[0:-1]
            ypos = np.insert(np.cumsum(dt * uavg_mid), 0, 0.0)
        ytu = np.vstack([ypos, tpos, uavg]).T
        np.savetxt(DI['pdir'] + 'ytu.dat',
                   ytu,
                   header=" y(m) t(s) u(m/s)",
                   fmt="%15.8e ")

    #----- write the mean and std data ----------------------------------------

    head = "x_(m)           "
    for i, time in enumerate(times):
        hi = str(i + 2) + "_" + str(time)
        hi = hi + (17 - len(hi)) * " "
        head = head + hi

    for i in range(nvar):
        var = np.vstack([X, np.reshape(means[:, i, :], (ntimes, nx))]).T
        np.savetxt(DI['pdir'] + "means_" + varNames[i] + ".dat",
                   var,
                   header=head,
                   fmt="%15.8e ")

    for i in range(nvar):
        var = np.vstack([X, np.reshape(sig[:, i, :], (ntimes, nx))]).T
        np.savetxt(DI['pdir'] + "sig_" + varNames[i] + ".dat",
                   var,
                   header=head)

    #----- write the conditional mean and std data ----------------------------

    if doConditional:

        head = "x_(m)           "
        for i, time in enumerate(times):
            hi = str(i + 2) + "_" + str(time)
            hi = hi + (17 - len(hi)) * " "
            head = head + hi

        for i in range(nvar):
            var = np.vstack(
                [fbins, np.reshape(cmeans[:, i, :], (ntimes, nfbins))]).T
            np.savetxt(DI['pdir'] + "cmeans_" + varNames[i] + ".dat",
                       var,
                       header=head,
                       fmt="%15.8e ")

        for i in range(nvar):
            var = np.vstack(
                [fbins, np.reshape(csig[:, i, :], (ntimes, nfbins))]).T
            np.savetxt(DI['pdir'] + "csig_" + varNames[i] + ".dat",
                       var,
                       header=head)